forked from mirrors/pronouns.cc
feat(backend): use jsonb instead of composite type arrays
parent f358a56053
commit b8a7e7443d
14 changed files with 161 additions and 1467 deletions
@@ -7,12 +7,12 @@ import (
 	"net/url"
 	"os"

-	"codeberg.org/u1f320/pronouns.cc/backend/db/queries"
 	"codeberg.org/u1f320/pronouns.cc/backend/log"
 	"emperror.dev/errors"
 	"github.com/Masterminds/squirrel"
 	"github.com/jackc/pgconn"
 	"github.com/jackc/pgx/v4"
+	"github.com/jackc/pgx/v4/log/zapadapter"
 	"github.com/jackc/pgx/v4/pgxpool"
 	"github.com/mediocregopher/radix/v4"
 	"github.com/minio/minio-go/v7"

@@ -23,10 +23,8 @@ var sq = squirrel.StatementBuilder.PlaceholderFormat(squirrel.Dollar)

 const ErrNothingToUpdate = errors.Sentinel("nothing to update")

-type querier interface {
-	Query(ctx context.Context, sql string, args ...interface{}) (pgx.Rows, error)
-	QueryRow(ctx context.Context, sql string, args ...interface{}) pgx.Row
-	Exec(ctx context.Context, sql string, arguments ...interface{}) (pgconn.CommandTag, error)
+type Execer interface {
+	Exec(ctx context.Context, sql string, arguments ...interface{}) (commandTag pgconn.CommandTag, err error)
 }

 type DB struct {

@@ -37,26 +35,26 @@ type DB struct {
 	minio       *minio.Client
 	minioBucket string
 	baseURL     *url.URL

-	q queries.Querier
 }

 func New() (*DB, error) {
-	pool, err := pgxpool.Connect(context.Background(), os.Getenv("DATABASE_URL"))
+	pgxCfg, err := pgxpool.ParseConfig(os.Getenv("DATABASE_URL"))
+	if err != nil {
+		return nil, errors.Wrap(err, "parsing config")
+	}
+	pgxCfg.ConnConfig.LogLevel = pgx.LogLevelDebug
+	pgxCfg.ConnConfig.Logger = zapadapter.NewLogger(log.Logger)
+	pool, err := pgxpool.ConnectConfig(context.Background(), pgxCfg)
+	// pool, err := pgxpool.Connect(context.Background(), os.Getenv("DATABASE_URL"))
 	if err != nil {
 		return nil, errors.Wrap(err, "creating postgres client")
 	}

-	var redis radix.Client
-	if os.Getenv("REDIS") != "" {
-		redis, err = (&radix.PoolConfig{}).New(context.Background(), "tcp", os.Getenv("REDIS"))
+	redis, err := (&radix.PoolConfig{}).New(context.Background(), "tcp", os.Getenv("REDIS"))
 	if err != nil {
 		return nil, errors.Wrap(err, "creating redis client")
 	}
-	} else {
-		log.Warn("$REDIS was empty! Any functionality using Redis (such as authentication) will not work")
-		redis = &dummyRedis{}
-	}

 	minioClient, err := minio.New(os.Getenv("MINIO_ENDPOINT"), &minio.Options{
 		Creds: credentials.NewStaticV4(os.Getenv("MINIO_ACCESS_KEY_ID"), os.Getenv("MINIO_ACCESS_KEY_SECRET"), ""),

@@ -78,8 +76,6 @@ func New() (*DB, error) {
 		minio:       minioClient,
 		minioBucket: os.Getenv("MINIO_BUCKET"),
 		baseURL:     baseURL,

-		q: queries.NewQuerier(pool),
 	}

 	return db, nil

@@ -162,3 +158,12 @@ func (db *DB) GetDelJSON(ctx context.Context, key string, v any) error {
 	}
 	return nil
 }
+
+// NotNull is a little helper that returns an *empty slice* when the slice's length is 0.
+// This is to prevent nil slices from being marshaled as JSON null
+func NotNull[T any](slice []T) []T {
+	if len(slice) == 0 {
+		return []T{}
+	}
+	return slice
+}
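The NotNull helper added at the end of this hunk exists because the entry and link slices are now marshaled to JSON on their way into jsonb columns (and into API responses), and a nil Go slice marshals as null rather than []. A minimal, self-contained sketch of that behaviour (not part of the commit itself):

package main

import (
	"encoding/json"
	"fmt"
)

// NotNull mirrors the helper above: it swaps a nil or empty slice for an
// allocated empty slice so it marshals as [] instead of null.
func NotNull[T any](slice []T) []T {
	if len(slice) == 0 {
		return []T{}
	}
	return slice
}

func main() {
	var links []string // nil slice

	a, _ := json.Marshal(links)          // encodes the nil slice directly
	b, _ := json.Marshal(NotNull(links)) // encodes an empty slice instead

	fmt.Println(string(a)) // null
	fmt.Println(string(b)) // []
}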
@@ -3,8 +3,6 @@ package db
 import (
 	"fmt"
 	"strings"

-	"codeberg.org/u1f320/pronouns.cc/backend/db/queries"
 )

 type WordStatus int

@@ -80,38 +78,3 @@ func (p PronounEntry) String() string {

 	return strings.Join(split[:1], "/")
 }
-
-func dbEntriesToFieldEntries(entries []queries.FieldEntry) []FieldEntry {
-	out := make([]FieldEntry, len(entries))
-	for i := range entries {
-		out[i] = FieldEntry{
-			*entries[i].Value, WordStatus(*entries[i].Status),
-		}
-	}
-	return out
-}
-
-func entriesToDBEntries(entries []FieldEntry) []queries.FieldEntry {
-	out := make([]queries.FieldEntry, len(entries))
-	for i := range entries {
-		status := int32(entries[i].Status)
-		out[i] = queries.FieldEntry{
-			Value:  &entries[i].Value,
-			Status: &status,
-		}
-	}
-	return out
-}
-
-func pronounEntriesToDBEntries(entries []PronounEntry) []queries.PronounEntry {
-	out := make([]queries.PronounEntry, len(entries))
-	for i := range entries {
-		status := int32(entries[i].Status)
-		out[i] = queries.PronounEntry{
-			Value:        &entries[i].Pronouns,
-			DisplayValue: entries[i].DisplayText,
-			Status:       &status,
-		}
-	}
-	return out
-}
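With the composite-type array columns gone, the conversion helpers deleted above need no replacement: the FieldEntry and PronounEntry values are written to and read from jsonb columns as plain JSON. A standalone sketch of that round trip; the struct shapes and JSON tags here are assumptions for illustration, the real definitions live in the backend's own types:

package main

import (
	"encoding/json"
	"fmt"
)

type WordStatus int

// Shapes mirroring the backend's entry types; field names/tags are assumed.
type FieldEntry struct {
	Value  string     `json:"value"`
	Status WordStatus `json:"status"`
}

type PronounEntry struct {
	Pronouns    string     `json:"pronouns"`
	DisplayText *string    `json:"display_text,omitempty"`
	Status      WordStatus `json:"status"`
}

func main() {
	names := []FieldEntry{{Value: "Ash", Status: 1}}

	// What ends up in the jsonb column: pgx encodes jsonb parameters with
	// encoding/json, so the stored document is equivalent to this.
	raw, _ := json.Marshal(names)
	fmt.Println(string(raw)) // [{"value":"Ash","status":1}]

	// And the reverse direction when a row is scanned back out.
	var decoded []FieldEntry
	_ = json.Unmarshal(raw, &decoded)
	fmt.Printf("%+v\n", decoded) // [{Value:Ash Status:1}]
}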
@@ -4,8 +4,8 @@ import (
 	"context"
 	"fmt"

-	"codeberg.org/u1f320/pronouns.cc/backend/db/queries"
 	"emperror.dev/errors"
+	"github.com/georgysavva/scany/pgxscan"
 	"github.com/jackc/pgx/v4"
 	"github.com/rs/xid"
 )

@@ -52,18 +52,14 @@ func (f Field) Validate() string {

 // UserFields returns the fields associated with the given user ID.
 func (db *DB) UserFields(ctx context.Context, id xid.ID) (fs []Field, err error) {
-	qfields, err := db.q.GetUserFields(ctx, id.String())
+	sql, args, err := sq.Select("*").From("user_fields").Where("user_id = ?", id).OrderBy("id").ToSql()
 	if err != nil {
-		return nil, errors.Wrap(err, "querying fields")
+		return fs, errors.Wrap(err, "building sql")
 	}

-	fs = make([]Field, len(qfields))
-	for i := range qfields {
-		fs[i] = Field{
-			ID:      int64(*qfields[i].ID),
-			Name:    *qfields[i].Name,
-			Entries: dbEntriesToFieldEntries(qfields[i].Entries),
-		}
+	err = pgxscan.Select(ctx, db, &fs, sql, args...)
+	if err != nil {
+		return fs, errors.Wrap(err, "executing query")
 	}

 	return fs, nil

@@ -81,34 +77,25 @@ func (db *DB) SetUserFields(ctx context.Context, tx pgx.Tx, userID xid.ID, field
 		return errors.Wrap(err, "deleting existing fields")
 	}

-	querier := queries.NewQuerier(tx)
 	for _, field := range fields {
-		querier.InsertUserField(ctx, queries.InsertUserFieldParams{
-			UserID:  userID.String(),
-			Name:    field.Name,
-			Entries: entriesToDBEntries(field.Entries),
-		})
-	}
+		_, err := tx.Exec(ctx, "INSERT INTO user_fields (user_id, name, entries) VALUES ($1, $2, $3)", userID, field.Name, field.Entries)
 		if err != nil {
 			return errors.Wrap(err, "inserting new fields")
 		}
+	}
 	return nil
 }

 // MemberFields returns the fields associated with the given member ID.
 func (db *DB) MemberFields(ctx context.Context, id xid.ID) (fs []Field, err error) {
-	qfields, err := db.q.GetMemberFields(ctx, id.String())
+	sql, args, err := sq.Select("*").From("member_fields").Where("member_id = ?", id).OrderBy("id").ToSql()
 	if err != nil {
-		return nil, errors.Wrap(err, "querying fields")
+		return fs, errors.Wrap(err, "building sql")
 	}

-	fs = make([]Field, len(qfields))
-	for i := range qfields {
-		fs[i] = Field{
-			ID:      int64(*qfields[i].ID),
-			Name:    *qfields[i].Name,
-			Entries: dbEntriesToFieldEntries(qfields[i].Entries),
-		}
+	err = pgxscan.Select(ctx, db, &fs, sql, args...)
+	if err != nil {
+		return fs, errors.Wrap(err, "executing query")
 	}

 	return fs, nil

@@ -126,16 +113,11 @@ func (db *DB) SetMemberFields(ctx context.Context, tx pgx.Tx, memberID xid.ID, f
 		return errors.Wrap(err, "deleting existing fields")
 	}

-	querier := queries.NewQuerier(tx)
 	for _, field := range fields {
-		querier.InsertMemberField(ctx, queries.InsertMemberFieldParams{
-			MemberID: memberID.String(),
-			Name:     field.Name,
-			Entries:  entriesToDBEntries(field.Entries),
-		})
-	}
+		_, err := tx.Exec(ctx, "INSERT INTO member_fields (member_id, name, entries) VALUES ($1, $2, $3)", memberID, field.Name, field.Entries)
 		if err != nil {
 			return errors.Wrap(err, "inserting new fields")
 		}
+	}
 	return nil
 }
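SetUserFields and SetMemberFields still take an explicit pgx.Tx so the delete-and-reinsert stays atomic. A hypothetical caller might look like the sketch below; it assumes the DB type embeds the *pgxpool.Pool it hands to pgxscan above (which would also provide Begin), and the real route handlers may differ:

func replaceUserFields(ctx context.Context, db *DB, userID xid.ID, fields []Field) error {
	tx, err := db.Begin(ctx) // assumption: *pgxpool.Pool is embedded in DB
	if err != nil {
		return errors.Wrap(err, "beginning transaction")
	}
	// Rolling back after a successful commit is a no-op.
	defer tx.Rollback(ctx)

	if err := db.SetUserFields(ctx, tx, userID, fields); err != nil {
		return err
	}
	return errors.Wrap(tx.Commit(ctx), "committing transaction")
}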
@@ -3,7 +3,6 @@ package db
 import (
 	"context"

-	"codeberg.org/u1f320/pronouns.cc/backend/db/queries"
 	"emperror.dev/errors"
 	"github.com/georgysavva/scany/pgxscan"
 	"github.com/jackc/pgconn"

@@ -33,64 +32,36 @@ const (
 	ErrMemberNameInUse = errors.Sentinel("member name already in use")
 )

-func (db *DB) getMember(ctx context.Context, q querier, id xid.ID) (m Member, err error) {
-	qm, err := queries.NewQuerier(q).GetMemberByID(ctx, id.String())
+func (db *DB) Member(ctx context.Context, id xid.ID) (m Member, err error) {
+	sql, args, err := sq.Select("*").From("members").Where("id = ?", id).ToSql()
 	if err != nil {
-		return m, errors.Wrap(err, "getting member from db")
+		return m, errors.Wrap(err, "building sql")
 	}

-	userID, err := xid.FromString(qm.UserID)
+	err = pgxscan.Get(ctx, db, &m, sql, args...)
 	if err != nil {
-		return m, errors.Wrap(err, "parsing user ID")
-	}
-	m = Member{
-		ID:          id,
-		UserID:      userID,
-		Name:        qm.Name,
-		DisplayName: qm.DisplayName,
-		Bio:         qm.Bio,
-		AvatarURLs:  qm.AvatarUrls,
-		Links:       qm.Links,
-		Names:       fieldEntriesFromDB(qm.Names),
-		Pronouns:    pronounsFromDB(qm.Pronouns),
+		return m, errors.Wrap(err, "executing query")
 	}
 	return m, nil
 }

-func (db *DB) Member(ctx context.Context, id xid.ID) (m Member, err error) {
-	return db.getMember(ctx, db, id)
-}
-
 // UserMember returns a member scoped by user.
 func (db *DB) UserMember(ctx context.Context, userID xid.ID, memberRef string) (m Member, err error) {
-	qm, err := db.q.GetMemberByName(ctx, userID.String(), memberRef)
+	sql, args, err := sq.Select("*").From("members").Where("user_id = ?", userID).Where("(id = ? or name = ?)", memberRef, memberRef).ToSql()
 	if err != nil {
-		return m, errors.Wrap(err, "getting member from db")
+		return m, errors.Wrap(err, "building sql")
 	}

-	memberID, err := xid.FromString(qm.ID)
+	err = pgxscan.Get(ctx, db, &m, sql, args...)
 	if err != nil {
-		return m, errors.Wrap(err, "parsing member ID")
-	}
-	m = Member{
-		ID:          memberID,
-		UserID:      userID,
-		Name:        qm.Name,
-		DisplayName: qm.DisplayName,
-		Bio:         qm.Bio,
-		AvatarURLs:  qm.AvatarUrls,
-		Links:       qm.Links,
-		Names:       fieldEntriesFromDB(qm.Names),
-		Pronouns:    pronounsFromDB(qm.Pronouns),
+		return m, errors.Wrap(err, "executing query")
 	}
 	return m, nil
 }

 // UserMembers returns all of a user's members, sorted by name.
 func (db *DB) UserMembers(ctx context.Context, userID xid.ID) (ms []Member, err error) {
-	sql, args, err := sq.Select("id", "user_id", "name", "display_name", "bio", "avatar_urls").
+	sql, args, err := sq.Select("id", "user_id", "name", "display_name", "bio", "avatar_urls", "names", "pronouns").
 		From("members").Where("user_id = ?", userID).
 		OrderBy("name", "id").ToSql()
 	if err != nil {

@@ -109,17 +80,19 @@ func (db *DB) UserMembers(ctx context.Context, userID xid.ID) (ms []Member, err
 }

 // CreateMember creates a member.
-func (db *DB) CreateMember(ctx context.Context, tx pgx.Tx, userID xid.ID, name string, displayName *string, bio string, links []string) (m Member, err error) {
+func (db *DB) CreateMember(
+	ctx context.Context, tx pgx.Tx, userID xid.ID,
+	name string, displayName *string, bio string, links []string,
+) (m Member, err error) {
 	sql, args, err := sq.Insert("members").
 		Columns("user_id", "id", "name", "display_name", "bio", "links").
 		Values(userID, xid.New(), name, displayName, bio, links).
-		Suffix("RETURNING id").ToSql()
+		Suffix("RETURNING *").ToSql()
 	if err != nil {
 		return m, errors.Wrap(err, "building sql")
 	}

-	var id xid.ID
-	err = tx.QueryRow(ctx, sql, args...).Scan(&id)
+	err = pgxscan.Get(ctx, tx, &m, sql, args...)
 	if err != nil {
 		pge := &pgconn.PgError{}
 		if errors.As(err, &pge) {

@@ -131,12 +104,6 @@ func (db *DB) CreateMember(ctx context.Context, tx pgx.Tx, userID xid.ID, name s

 		return m, errors.Wrap(err, "executing query")
 	}
-
-	m, err = db.getMember(ctx, tx, id)
-	if err != nil {
-		return m, errors.Wrap(err, "getting created member")
-	}
-
 	return m, nil
 }

@@ -177,10 +144,20 @@ func (db *DB) UpdateMember(
 	avatarURLs []string,
 ) (m Member, err error) {
 	if name == nil && displayName == nil && bio == nil && links == nil && avatarURLs == nil {
-		return db.getMember(ctx, tx, id)
+		// get member
+		sql, args, err := sq.Select("*").From("members").Where("id = ?", id).ToSql()
+		if err != nil {
+			return m, errors.Wrap(err, "building sql")
+		}

-	builder := sq.Update("members").Where("id = ?", id)
+		err = pgxscan.Get(ctx, tx, &m, sql, args...)
+		if err != nil {
+			return m, errors.Wrap(err, "executing query")
+		}
+		return m, nil
 	}
+
+	builder := sq.Update("members").Where("id = ?", id).Suffix("RETURNING *")
 	if name != nil {
 		if *name == "" {
 			builder = builder.Set("name", nil)

@@ -203,27 +180,19 @@ func (db *DB) UpdateMember(
 		}
 	}
 	if links != nil {
-		if len(*links) == 0 {
-			builder = builder.Set("links", nil)
-		} else {
 		builder = builder.Set("links", *links)
-		}
 	}

 	if avatarURLs != nil {
-		if len(avatarURLs) == 0 {
-			builder = builder.Set("avatar_urls", nil)
-		} else {
 		builder = builder.Set("avatar_urls", avatarURLs)
-		}
 	}

 	sql, args, err := builder.ToSql()
 	if err != nil {
 		return m, errors.Wrap(err, "building sql")
 	}

-	_, err = tx.Exec(ctx, sql, args...)
+	err = pgxscan.Get(ctx, tx, &m, sql, args...)
 	if err != nil {
 		pge := &pgconn.PgError{}
 		if errors.As(err, &pge) {

@@ -234,11 +203,5 @@ func (db *DB) UpdateMember(

 		return m, errors.Wrap(err, "executing sql")
 	}
-
-	m, err = db.getMember(ctx, tx, id)
-	if err != nil {
-		return m, errors.Wrap(err, "getting member")
-	}
-
 	return m, nil
 }
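For a concrete view of what the builders above hand to pgx, here is a small standalone program (with made-up values) that prints the SQL and argument list squirrel's dollar-placeholder builder generates for the two patterns used in this commit: the scoped member lookup, and an update with RETURNING * so the full row can be scanned straight back into the struct.

package main

import (
	"fmt"

	sq "github.com/Masterminds/squirrel"
)

var psql = sq.StatementBuilder.PlaceholderFormat(sq.Dollar)

func main() {
	// The member lookup built in UserMember above.
	sqlStr, args, _ := psql.Select("*").From("members").
		Where("user_id = ?", "userid123").
		Where("(id = ? or name = ?)", "ref", "ref").
		ToSql()
	fmt.Println(sqlStr)
	// SELECT * FROM members WHERE user_id = $1 AND (id = $2 or name = $3)
	fmt.Println(args) // [userid123 ref ref]

	// The update pattern: RETURNING * plus pgxscan.Get replaces the old
	// "update, then re-fetch the member" round trip.
	sqlStr, args, _ = psql.Update("members").
		Where("id = ?", "memberid").
		Set("display_name", "Example").
		Suffix("RETURNING *").
		ToSql()
	fmt.Println(sqlStr)
	// UPDATE members SET display_name = $1 WHERE id = $2 RETURNING *
	fmt.Println(args) // [Example memberid]
}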
@@ -3,55 +3,33 @@ package db
 import (
 	"context"

-	"codeberg.org/u1f320/pronouns.cc/backend/db/queries"
 	"emperror.dev/errors"
 	"github.com/jackc/pgx/v4"
 	"github.com/rs/xid"
 )

 func (db *DB) SetUserNamesPronouns(ctx context.Context, tx pgx.Tx, userID xid.ID, names []FieldEntry, pronouns []PronounEntry) (err error) {
-	_, err = queries.NewQuerier(tx).UpdateUserNamesPronouns(ctx, queries.UpdateUserNamesPronounsParams{
-		ID:       userID.String(),
-		Names:    entriesToDBEntries(names),
-		Pronouns: pronounEntriesToDBEntries(pronouns),
-	})
+	sql, args, err := sq.Update("users").Set("names", names).Set("pronouns", pronouns).Where("id = ?", userID).ToSql()
 	if err != nil {
-		return errors.Wrap(err, "executing update names/pronouns query")
+		return errors.Wrap(err, "building sql")
+	}
+
+	_, err = tx.Exec(ctx, sql, args...)
+	if err != nil {
+		return errors.Wrap(err, "executing query")
 	}
 	return nil
 }

 func (db *DB) SetMemberNamesPronouns(ctx context.Context, tx pgx.Tx, memberID xid.ID, names []FieldEntry, pronouns []PronounEntry) (err error) {
-	_, err = queries.NewQuerier(tx).UpdateMemberNamesPronouns(ctx, queries.UpdateMemberNamesPronounsParams{
-		ID:       memberID.String(),
-		Names:    entriesToDBEntries(names),
-		Pronouns: pronounEntriesToDBEntries(pronouns),
-	})
+	sql, args, err := sq.Update("members").Set("names", names).Set("pronouns", pronouns).Where("id = ?", memberID).ToSql()
 	if err != nil {
-		return errors.Wrap(err, "executing update names/pronouns query")
+		return errors.Wrap(err, "building sql")
+	}
+
+	_, err = tx.Exec(ctx, sql, args...)
+	if err != nil {
+		return errors.Wrap(err, "executing query")
 	}
 	return nil
 }
-
-func fieldEntriesFromDB(dn []queries.FieldEntry) []FieldEntry {
-	names := make([]FieldEntry, len(dn))
-	for i := range dn {
-		names[i] = FieldEntry{
-			Value:  *dn[i].Value,
-			Status: WordStatus(*dn[i].Status),
-		}
-	}
-	return names
-}
-
-func pronounsFromDB(dn []queries.PronounEntry) []PronounEntry {
-	pronouns := make([]PronounEntry, len(dn))
-	for i := range dn {
-		pronouns[i] = PronounEntry{
-			DisplayText: dn[i].DisplayValue,
-			Pronouns:    *dn[i].Value,
-			Status:      WordStatus(*dn[i].Status),
-		}
-	}
-	return pronouns
-}
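The read side needs no hand-written conversion either: pgxscan maps returned columns onto struct fields, and pgx unmarshals the jsonb names/pronouns values into the slice fields. A rough sketch of what that relies on, with assumed field names and db tags (the backend's real Member struct may differ):

// Illustrative only; not part of the commit.
type Member struct {
	ID          xid.ID
	UserID      xid.ID  `db:"user_id"`
	Name        string
	DisplayName *string `db:"display_name"`
	Bio         *string
	Links       []string
	AvatarURLs  []string       `db:"avatar_urls"`
	Names       []FieldEntry   // jsonb column "names"
	Pronouns    []PronounEntry // jsonb column "pronouns"
}

// db is the same value handed to pgxscan in the diff above.
var m Member
err := pgxscan.Get(ctx, db, &m, "SELECT * FROM members WHERE id = $1", id)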
@@ -1,3 +0,0 @@
-package queries
-
-//go:generate pggen gen go --query-glob queries.user.sql --query-glob queries.member.sql --postgres-connection "postgres://pggen:pggen@localhost/pggen"
@@ -1,31 +0,0 @@
--- name: GetMemberByID :one
-SELECT * FROM members
-WHERE id = pggen.arg('id');
-
--- name: GetMemberByName :one
-SELECT * FROM members
-WHERE user_id = pggen.arg('user_id') AND (
-	id = pggen.arg('member_ref')
-	OR name = pggen.arg('member_ref')
-);
-
--- name: GetMembers :many
-SELECT * FROM members
-WHERE user_id = pggen.arg('user_id')
-ORDER BY name, id;
-
--- name: UpdateMemberNamesPronouns :one
-UPDATE members SET
-	names = pggen.arg('names'),
-	pronouns = pggen.arg('pronouns')
-WHERE id = pggen.arg('id')
-RETURNING *;
-
--- name: GetMemberFields :many
-SELECT * FROM member_fields WHERE member_id = pggen.arg('member_id') ORDER BY id ASC;
-
--- name: InsertMemberField :one
-INSERT INTO member_fields
-	(member_id, name, entries) VALUES
-	(pggen.arg('member_id'), pggen.arg('name'), pggen.arg('entries'))
-RETURNING *;
@ -1,803 +0,0 @@
|
||||||
// Code generated by pggen. DO NOT EDIT.
|
|
||||||
|
|
||||||
package queries
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"fmt"
|
|
||||||
"github.com/jackc/pgconn"
|
|
||||||
"github.com/jackc/pgtype"
|
|
||||||
"github.com/jackc/pgx/v4"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Querier is a typesafe Go interface backed by SQL queries.
|
|
||||||
//
|
|
||||||
// Methods ending with Batch enqueue a query to run later in a pgx.Batch. After
|
|
||||||
// calling SendBatch on pgx.Conn, pgxpool.Pool, or pgx.Tx, use the Scan methods
|
|
||||||
// to parse the results.
|
|
||||||
type Querier interface {
|
|
||||||
GetMemberByID(ctx context.Context, id string) (GetMemberByIDRow, error)
|
|
||||||
// GetMemberByIDBatch enqueues a GetMemberByID query into batch to be executed
|
|
||||||
// later by the batch.
|
|
||||||
GetMemberByIDBatch(batch genericBatch, id string)
|
|
||||||
// GetMemberByIDScan scans the result of an executed GetMemberByIDBatch query.
|
|
||||||
GetMemberByIDScan(results pgx.BatchResults) (GetMemberByIDRow, error)
|
|
||||||
|
|
||||||
GetMemberByName(ctx context.Context, userID string, memberRef string) (GetMemberByNameRow, error)
|
|
||||||
// GetMemberByNameBatch enqueues a GetMemberByName query into batch to be executed
|
|
||||||
// later by the batch.
|
|
||||||
GetMemberByNameBatch(batch genericBatch, userID string, memberRef string)
|
|
||||||
// GetMemberByNameScan scans the result of an executed GetMemberByNameBatch query.
|
|
||||||
GetMemberByNameScan(results pgx.BatchResults) (GetMemberByNameRow, error)
|
|
||||||
|
|
||||||
GetMembers(ctx context.Context, userID string) ([]GetMembersRow, error)
|
|
||||||
// GetMembersBatch enqueues a GetMembers query into batch to be executed
|
|
||||||
// later by the batch.
|
|
||||||
GetMembersBatch(batch genericBatch, userID string)
|
|
||||||
// GetMembersScan scans the result of an executed GetMembersBatch query.
|
|
||||||
GetMembersScan(results pgx.BatchResults) ([]GetMembersRow, error)
|
|
||||||
|
|
||||||
UpdateMemberNamesPronouns(ctx context.Context, params UpdateMemberNamesPronounsParams) (UpdateMemberNamesPronounsRow, error)
|
|
||||||
// UpdateMemberNamesPronounsBatch enqueues a UpdateMemberNamesPronouns query into batch to be executed
|
|
||||||
// later by the batch.
|
|
||||||
UpdateMemberNamesPronounsBatch(batch genericBatch, params UpdateMemberNamesPronounsParams)
|
|
||||||
// UpdateMemberNamesPronounsScan scans the result of an executed UpdateMemberNamesPronounsBatch query.
|
|
||||||
UpdateMemberNamesPronounsScan(results pgx.BatchResults) (UpdateMemberNamesPronounsRow, error)
|
|
||||||
|
|
||||||
GetMemberFields(ctx context.Context, memberID string) ([]GetMemberFieldsRow, error)
|
|
||||||
// GetMemberFieldsBatch enqueues a GetMemberFields query into batch to be executed
|
|
||||||
// later by the batch.
|
|
||||||
GetMemberFieldsBatch(batch genericBatch, memberID string)
|
|
||||||
// GetMemberFieldsScan scans the result of an executed GetMemberFieldsBatch query.
|
|
||||||
GetMemberFieldsScan(results pgx.BatchResults) ([]GetMemberFieldsRow, error)
|
|
||||||
|
|
||||||
InsertMemberField(ctx context.Context, params InsertMemberFieldParams) (InsertMemberFieldRow, error)
|
|
||||||
// InsertMemberFieldBatch enqueues a InsertMemberField query into batch to be executed
|
|
||||||
// later by the batch.
|
|
||||||
InsertMemberFieldBatch(batch genericBatch, params InsertMemberFieldParams)
|
|
||||||
// InsertMemberFieldScan scans the result of an executed InsertMemberFieldBatch query.
|
|
||||||
InsertMemberFieldScan(results pgx.BatchResults) (InsertMemberFieldRow, error)
|
|
||||||
|
|
||||||
GetUserByID(ctx context.Context, id string) (GetUserByIDRow, error)
|
|
||||||
// GetUserByIDBatch enqueues a GetUserByID query into batch to be executed
|
|
||||||
// later by the batch.
|
|
||||||
GetUserByIDBatch(batch genericBatch, id string)
|
|
||||||
// GetUserByIDScan scans the result of an executed GetUserByIDBatch query.
|
|
||||||
GetUserByIDScan(results pgx.BatchResults) (GetUserByIDRow, error)
|
|
||||||
|
|
||||||
GetUserByUsername(ctx context.Context, username string) (GetUserByUsernameRow, error)
|
|
||||||
// GetUserByUsernameBatch enqueues a GetUserByUsername query into batch to be executed
|
|
||||||
// later by the batch.
|
|
||||||
GetUserByUsernameBatch(batch genericBatch, username string)
|
|
||||||
// GetUserByUsernameScan scans the result of an executed GetUserByUsernameBatch query.
|
|
||||||
GetUserByUsernameScan(results pgx.BatchResults) (GetUserByUsernameRow, error)
|
|
||||||
|
|
||||||
UpdateUserNamesPronouns(ctx context.Context, params UpdateUserNamesPronounsParams) (UpdateUserNamesPronounsRow, error)
|
|
||||||
// UpdateUserNamesPronounsBatch enqueues a UpdateUserNamesPronouns query into batch to be executed
|
|
||||||
// later by the batch.
|
|
||||||
UpdateUserNamesPronounsBatch(batch genericBatch, params UpdateUserNamesPronounsParams)
|
|
||||||
// UpdateUserNamesPronounsScan scans the result of an executed UpdateUserNamesPronounsBatch query.
|
|
||||||
UpdateUserNamesPronounsScan(results pgx.BatchResults) (UpdateUserNamesPronounsRow, error)
|
|
||||||
|
|
||||||
GetUserFields(ctx context.Context, userID string) ([]GetUserFieldsRow, error)
|
|
||||||
// GetUserFieldsBatch enqueues a GetUserFields query into batch to be executed
|
|
||||||
// later by the batch.
|
|
||||||
GetUserFieldsBatch(batch genericBatch, userID string)
|
|
||||||
// GetUserFieldsScan scans the result of an executed GetUserFieldsBatch query.
|
|
||||||
GetUserFieldsScan(results pgx.BatchResults) ([]GetUserFieldsRow, error)
|
|
||||||
|
|
||||||
InsertUserField(ctx context.Context, params InsertUserFieldParams) (InsertUserFieldRow, error)
|
|
||||||
// InsertUserFieldBatch enqueues a InsertUserField query into batch to be executed
|
|
||||||
// later by the batch.
|
|
||||||
InsertUserFieldBatch(batch genericBatch, params InsertUserFieldParams)
|
|
||||||
// InsertUserFieldScan scans the result of an executed InsertUserFieldBatch query.
|
|
||||||
InsertUserFieldScan(results pgx.BatchResults) (InsertUserFieldRow, error)
|
|
||||||
}
|
|
||||||
|
|
||||||
type DBQuerier struct {
|
|
||||||
conn genericConn // underlying Postgres transport to use
|
|
||||||
types *typeResolver // resolve types by name
|
|
||||||
}
|
|
||||||
|
|
||||||
var _ Querier = &DBQuerier{}
|
|
||||||
|
|
||||||
// genericConn is a connection to a Postgres database. This is usually backed by
|
|
||||||
// *pgx.Conn, pgx.Tx, or *pgxpool.Pool.
|
|
||||||
type genericConn interface {
|
|
||||||
// Query executes sql with args. If there is an error the returned Rows will
|
|
||||||
// be returned in an error state. So it is allowed to ignore the error
|
|
||||||
// returned from Query and handle it in Rows.
|
|
||||||
Query(ctx context.Context, sql string, args ...interface{}) (pgx.Rows, error)
|
|
||||||
|
|
||||||
// QueryRow is a convenience wrapper over Query. Any error that occurs while
|
|
||||||
// querying is deferred until calling Scan on the returned Row. That Row will
|
|
||||||
// error with pgx.ErrNoRows if no rows are returned.
|
|
||||||
QueryRow(ctx context.Context, sql string, args ...interface{}) pgx.Row
|
|
||||||
|
|
||||||
// Exec executes sql. sql can be either a prepared statement name or an SQL
|
|
||||||
// string. arguments should be referenced positionally from the sql string
|
|
||||||
// as $1, $2, etc.
|
|
||||||
Exec(ctx context.Context, sql string, arguments ...interface{}) (pgconn.CommandTag, error)
|
|
||||||
}
|
|
||||||
|
|
||||||
// genericBatch batches queries to send in a single network request to a
|
|
||||||
// Postgres server. This is usually backed by *pgx.Batch.
|
|
||||||
type genericBatch interface {
|
|
||||||
// Queue queues a query to batch b. query can be an SQL query or the name of a
|
|
||||||
// prepared statement. See Queue on *pgx.Batch.
|
|
||||||
Queue(query string, arguments ...interface{})
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewQuerier creates a DBQuerier that implements Querier. conn is typically
|
|
||||||
// *pgx.Conn, pgx.Tx, or *pgxpool.Pool.
|
|
||||||
func NewQuerier(conn genericConn) *DBQuerier {
|
|
||||||
return NewQuerierConfig(conn, QuerierConfig{})
|
|
||||||
}
|
|
||||||
|
|
||||||
type QuerierConfig struct {
|
|
||||||
// DataTypes contains pgtype.Value to use for encoding and decoding instead
|
|
||||||
// of pggen-generated pgtype.ValueTranscoder.
|
|
||||||
//
|
|
||||||
// If OIDs are available for an input parameter type and all of its
|
|
||||||
// transitive dependencies, pggen will use the binary encoding format for
|
|
||||||
// the input parameter.
|
|
||||||
DataTypes []pgtype.DataType
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewQuerierConfig creates a DBQuerier that implements Querier with the given
|
|
||||||
// config. conn is typically *pgx.Conn, pgx.Tx, or *pgxpool.Pool.
|
|
||||||
func NewQuerierConfig(conn genericConn, cfg QuerierConfig) *DBQuerier {
|
|
||||||
return &DBQuerier{conn: conn, types: newTypeResolver(cfg.DataTypes)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// WithTx creates a new DBQuerier that uses the transaction to run all queries.
|
|
||||||
func (q *DBQuerier) WithTx(tx pgx.Tx) (*DBQuerier, error) {
|
|
||||||
return &DBQuerier{conn: tx}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// preparer is any Postgres connection transport that provides a way to prepare
|
|
||||||
// a statement, most commonly *pgx.Conn.
|
|
||||||
type preparer interface {
|
|
||||||
Prepare(ctx context.Context, name, sql string) (sd *pgconn.StatementDescription, err error)
|
|
||||||
}
|
|
||||||
|
|
||||||
// PrepareAllQueries executes a PREPARE statement for all pggen generated SQL
|
|
||||||
// queries in querier files. Typical usage is as the AfterConnect callback
|
|
||||||
// for pgxpool.Config
|
|
||||||
//
|
|
||||||
// pgx will use the prepared statement if available. Calling PrepareAllQueries
|
|
||||||
// is an optional optimization to avoid a network round-trip the first time pgx
|
|
||||||
// runs a query if pgx statement caching is enabled.
|
|
||||||
func PrepareAllQueries(ctx context.Context, p preparer) error {
|
|
||||||
if _, err := p.Prepare(ctx, getMemberByIDSQL, getMemberByIDSQL); err != nil {
|
|
||||||
return fmt.Errorf("prepare query 'GetMemberByID': %w", err)
|
|
||||||
}
|
|
||||||
if _, err := p.Prepare(ctx, getMemberByNameSQL, getMemberByNameSQL); err != nil {
|
|
||||||
return fmt.Errorf("prepare query 'GetMemberByName': %w", err)
|
|
||||||
}
|
|
||||||
if _, err := p.Prepare(ctx, getMembersSQL, getMembersSQL); err != nil {
|
|
||||||
return fmt.Errorf("prepare query 'GetMembers': %w", err)
|
|
||||||
}
|
|
||||||
if _, err := p.Prepare(ctx, updateMemberNamesPronounsSQL, updateMemberNamesPronounsSQL); err != nil {
|
|
||||||
return fmt.Errorf("prepare query 'UpdateMemberNamesPronouns': %w", err)
|
|
||||||
}
|
|
||||||
if _, err := p.Prepare(ctx, getMemberFieldsSQL, getMemberFieldsSQL); err != nil {
|
|
||||||
return fmt.Errorf("prepare query 'GetMemberFields': %w", err)
|
|
||||||
}
|
|
||||||
if _, err := p.Prepare(ctx, insertMemberFieldSQL, insertMemberFieldSQL); err != nil {
|
|
||||||
return fmt.Errorf("prepare query 'InsertMemberField': %w", err)
|
|
||||||
}
|
|
||||||
if _, err := p.Prepare(ctx, getUserByIDSQL, getUserByIDSQL); err != nil {
|
|
||||||
return fmt.Errorf("prepare query 'GetUserByID': %w", err)
|
|
||||||
}
|
|
||||||
if _, err := p.Prepare(ctx, getUserByUsernameSQL, getUserByUsernameSQL); err != nil {
|
|
||||||
return fmt.Errorf("prepare query 'GetUserByUsername': %w", err)
|
|
||||||
}
|
|
||||||
if _, err := p.Prepare(ctx, updateUserNamesPronounsSQL, updateUserNamesPronounsSQL); err != nil {
|
|
||||||
return fmt.Errorf("prepare query 'UpdateUserNamesPronouns': %w", err)
|
|
||||||
}
|
|
||||||
if _, err := p.Prepare(ctx, getUserFieldsSQL, getUserFieldsSQL); err != nil {
|
|
||||||
return fmt.Errorf("prepare query 'GetUserFields': %w", err)
|
|
||||||
}
|
|
||||||
if _, err := p.Prepare(ctx, insertUserFieldSQL, insertUserFieldSQL); err != nil {
|
|
||||||
return fmt.Errorf("prepare query 'InsertUserField': %w", err)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// FieldEntry represents the Postgres composite type "field_entry".
|
|
||||||
type FieldEntry struct {
|
|
||||||
Value *string `json:"value"`
|
|
||||||
Status *int32 `json:"status"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// PronounEntry represents the Postgres composite type "pronoun_entry".
|
|
||||||
type PronounEntry struct {
|
|
||||||
Value *string `json:"value"`
|
|
||||||
DisplayValue *string `json:"display_value"`
|
|
||||||
Status *int32 `json:"status"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// typeResolver looks up the pgtype.ValueTranscoder by Postgres type name.
|
|
||||||
type typeResolver struct {
|
|
||||||
connInfo *pgtype.ConnInfo // types by Postgres type name
|
|
||||||
}
|
|
||||||
|
|
||||||
func newTypeResolver(types []pgtype.DataType) *typeResolver {
|
|
||||||
ci := pgtype.NewConnInfo()
|
|
||||||
for _, typ := range types {
|
|
||||||
if txt, ok := typ.Value.(textPreferrer); ok && typ.OID != unknownOID {
|
|
||||||
typ.Value = txt.ValueTranscoder
|
|
||||||
}
|
|
||||||
ci.RegisterDataType(typ)
|
|
||||||
}
|
|
||||||
return &typeResolver{connInfo: ci}
|
|
||||||
}
|
|
||||||
|
|
||||||
// findValue find the OID, and pgtype.ValueTranscoder for a Postgres type name.
|
|
||||||
func (tr *typeResolver) findValue(name string) (uint32, pgtype.ValueTranscoder, bool) {
|
|
||||||
typ, ok := tr.connInfo.DataTypeForName(name)
|
|
||||||
if !ok {
|
|
||||||
return 0, nil, false
|
|
||||||
}
|
|
||||||
v := pgtype.NewValue(typ.Value)
|
|
||||||
return typ.OID, v.(pgtype.ValueTranscoder), true
|
|
||||||
}
|
|
||||||
|
|
||||||
// setValue sets the value of a ValueTranscoder to a value that should always
|
|
||||||
// work and panics if it fails.
|
|
||||||
func (tr *typeResolver) setValue(vt pgtype.ValueTranscoder, val interface{}) pgtype.ValueTranscoder {
|
|
||||||
if err := vt.Set(val); err != nil {
|
|
||||||
panic(fmt.Sprintf("set ValueTranscoder %T to %+v: %s", vt, val, err))
|
|
||||||
}
|
|
||||||
return vt
|
|
||||||
}
|
|
||||||
|
|
||||||
type compositeField struct {
|
|
||||||
name string // name of the field
|
|
||||||
typeName string // Postgres type name
|
|
||||||
defaultVal pgtype.ValueTranscoder // default value to use
|
|
||||||
}
|
|
||||||
|
|
||||||
func (tr *typeResolver) newCompositeValue(name string, fields ...compositeField) pgtype.ValueTranscoder {
|
|
||||||
if _, val, ok := tr.findValue(name); ok {
|
|
||||||
return val
|
|
||||||
}
|
|
||||||
fs := make([]pgtype.CompositeTypeField, len(fields))
|
|
||||||
vals := make([]pgtype.ValueTranscoder, len(fields))
|
|
||||||
isBinaryOk := true
|
|
||||||
for i, field := range fields {
|
|
||||||
oid, val, ok := tr.findValue(field.typeName)
|
|
||||||
if !ok {
|
|
||||||
oid = unknownOID
|
|
||||||
val = field.defaultVal
|
|
||||||
}
|
|
||||||
isBinaryOk = isBinaryOk && oid != unknownOID
|
|
||||||
fs[i] = pgtype.CompositeTypeField{Name: field.name, OID: oid}
|
|
||||||
vals[i] = val
|
|
||||||
}
|
|
||||||
// Okay to ignore error because it's only thrown when the number of field
|
|
||||||
// names does not equal the number of ValueTranscoders.
|
|
||||||
typ, _ := pgtype.NewCompositeTypeValues(name, fs, vals)
|
|
||||||
if !isBinaryOk {
|
|
||||||
return textPreferrer{ValueTranscoder: typ, typeName: name}
|
|
||||||
}
|
|
||||||
return typ
|
|
||||||
}
|
|
||||||
|
|
||||||
func (tr *typeResolver) newArrayValue(name, elemName string, defaultVal func() pgtype.ValueTranscoder) pgtype.ValueTranscoder {
|
|
||||||
if _, val, ok := tr.findValue(name); ok {
|
|
||||||
return val
|
|
||||||
}
|
|
||||||
elemOID, elemVal, ok := tr.findValue(elemName)
|
|
||||||
elemValFunc := func() pgtype.ValueTranscoder {
|
|
||||||
return pgtype.NewValue(elemVal).(pgtype.ValueTranscoder)
|
|
||||||
}
|
|
||||||
if !ok {
|
|
||||||
elemOID = unknownOID
|
|
||||||
elemValFunc = defaultVal
|
|
||||||
}
|
|
||||||
typ := pgtype.NewArrayType(name, elemOID, elemValFunc)
|
|
||||||
if elemOID == unknownOID {
|
|
||||||
return textPreferrer{ValueTranscoder: typ, typeName: name}
|
|
||||||
}
|
|
||||||
return typ
|
|
||||||
}
|
|
||||||
|
|
||||||
// newFieldEntry creates a new pgtype.ValueTranscoder for the Postgres
|
|
||||||
// composite type 'field_entry'.
|
|
||||||
func (tr *typeResolver) newFieldEntry() pgtype.ValueTranscoder {
|
|
||||||
return tr.newCompositeValue(
|
|
||||||
"field_entry",
|
|
||||||
compositeField{name: "value", typeName: "text", defaultVal: &pgtype.Text{}},
|
|
||||||
compositeField{name: "status", typeName: "int4", defaultVal: &pgtype.Int4{}},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// newFieldEntryRaw returns all composite fields for the Postgres composite
|
|
||||||
// type 'field_entry' as a slice of interface{} to encode query parameters.
|
|
||||||
func (tr *typeResolver) newFieldEntryRaw(v FieldEntry) []interface{} {
|
|
||||||
return []interface{}{
|
|
||||||
v.Value,
|
|
||||||
v.Status,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// newPronounEntry creates a new pgtype.ValueTranscoder for the Postgres
|
|
||||||
// composite type 'pronoun_entry'.
|
|
||||||
func (tr *typeResolver) newPronounEntry() pgtype.ValueTranscoder {
|
|
||||||
return tr.newCompositeValue(
|
|
||||||
"pronoun_entry",
|
|
||||||
compositeField{name: "value", typeName: "text", defaultVal: &pgtype.Text{}},
|
|
||||||
compositeField{name: "display_value", typeName: "text", defaultVal: &pgtype.Text{}},
|
|
||||||
compositeField{name: "status", typeName: "int4", defaultVal: &pgtype.Int4{}},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// newPronounEntryRaw returns all composite fields for the Postgres composite
|
|
||||||
// type 'pronoun_entry' as a slice of interface{} to encode query parameters.
|
|
||||||
func (tr *typeResolver) newPronounEntryRaw(v PronounEntry) []interface{} {
|
|
||||||
return []interface{}{
|
|
||||||
v.Value,
|
|
||||||
v.DisplayValue,
|
|
||||||
v.Status,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// newFieldEntryArray creates a new pgtype.ValueTranscoder for the Postgres
|
|
||||||
// '_field_entry' array type.
|
|
||||||
func (tr *typeResolver) newFieldEntryArray() pgtype.ValueTranscoder {
|
|
||||||
return tr.newArrayValue("_field_entry", "field_entry", tr.newFieldEntry)
|
|
||||||
}
|
|
||||||
|
|
||||||
// newFieldEntryArrayInit creates an initialized pgtype.ValueTranscoder for the
|
|
||||||
// Postgres array type '_field_entry' to encode query parameters.
|
|
||||||
func (tr *typeResolver) newFieldEntryArrayInit(ps []FieldEntry) pgtype.ValueTranscoder {
|
|
||||||
dec := tr.newFieldEntryArray()
|
|
||||||
if err := dec.Set(tr.newFieldEntryArrayRaw(ps)); err != nil {
|
|
||||||
panic("encode []FieldEntry: " + err.Error()) // should always succeed
|
|
||||||
}
|
|
||||||
return textPreferrer{ValueTranscoder: dec, typeName: "_field_entry"}
|
|
||||||
}
|
|
||||||
|
|
||||||
// newFieldEntryArrayRaw returns all elements for the Postgres array type '_field_entry'
|
|
||||||
// as a slice of interface{} for use with the pgtype.Value Set method.
|
|
||||||
func (tr *typeResolver) newFieldEntryArrayRaw(vs []FieldEntry) []interface{} {
|
|
||||||
elems := make([]interface{}, len(vs))
|
|
||||||
for i, v := range vs {
|
|
||||||
elems[i] = tr.newFieldEntryRaw(v)
|
|
||||||
}
|
|
||||||
return elems
|
|
||||||
}
|
|
||||||
|
|
||||||
// newPronounEntryArray creates a new pgtype.ValueTranscoder for the Postgres
|
|
||||||
// '_pronoun_entry' array type.
|
|
||||||
func (tr *typeResolver) newPronounEntryArray() pgtype.ValueTranscoder {
|
|
||||||
return tr.newArrayValue("_pronoun_entry", "pronoun_entry", tr.newPronounEntry)
|
|
||||||
}
|
|
||||||
|
|
||||||
// newPronounEntryArrayInit creates an initialized pgtype.ValueTranscoder for the
|
|
||||||
// Postgres array type '_pronoun_entry' to encode query parameters.
|
|
||||||
func (tr *typeResolver) newPronounEntryArrayInit(ps []PronounEntry) pgtype.ValueTranscoder {
|
|
||||||
dec := tr.newPronounEntryArray()
|
|
||||||
if err := dec.Set(tr.newPronounEntryArrayRaw(ps)); err != nil {
|
|
||||||
panic("encode []PronounEntry: " + err.Error()) // should always succeed
|
|
||||||
}
|
|
||||||
return textPreferrer{ValueTranscoder: dec, typeName: "_pronoun_entry"}
|
|
||||||
}
|
|
||||||
|
|
||||||
// newPronounEntryArrayRaw returns all elements for the Postgres array type '_pronoun_entry'
|
|
||||||
// as a slice of interface{} for use with the pgtype.Value Set method.
|
|
||||||
func (tr *typeResolver) newPronounEntryArrayRaw(vs []PronounEntry) []interface{} {
|
|
||||||
elems := make([]interface{}, len(vs))
|
|
||||||
for i, v := range vs {
|
|
||||||
elems[i] = tr.newPronounEntryRaw(v)
|
|
||||||
}
|
|
||||||
return elems
|
|
||||||
}
|
|
||||||
|
|
||||||
const getMemberByIDSQL = `SELECT * FROM members
|
|
||||||
WHERE id = $1;`
|
|
||||||
|
|
||||||
type GetMemberByIDRow struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
UserID string `json:"user_id"`
|
|
||||||
Name string `json:"name"`
|
|
||||||
Bio *string `json:"bio"`
|
|
||||||
AvatarUrls []string `json:"avatar_urls"`
|
|
||||||
Links []string `json:"links"`
|
|
||||||
DisplayName *string `json:"display_name"`
|
|
||||||
Names []FieldEntry `json:"names"`
|
|
||||||
Pronouns []PronounEntry `json:"pronouns"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetMemberByID implements Querier.GetMemberByID.
|
|
||||||
func (q *DBQuerier) GetMemberByID(ctx context.Context, id string) (GetMemberByIDRow, error) {
|
|
||||||
ctx = context.WithValue(ctx, "pggen_query_name", "GetMemberByID")
|
|
||||||
row := q.conn.QueryRow(ctx, getMemberByIDSQL, id)
|
|
||||||
var item GetMemberByIDRow
|
|
||||||
namesArray := q.types.newFieldEntryArray()
|
|
||||||
pronounsArray := q.types.newPronounEntryArray()
|
|
||||||
if err := row.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
|
|
||||||
return item, fmt.Errorf("query GetMemberByID: %w", err)
|
|
||||||
}
|
|
||||||
if err := namesArray.AssignTo(&item.Names); err != nil {
|
|
||||||
return item, fmt.Errorf("assign GetMemberByID row: %w", err)
|
|
||||||
}
|
|
||||||
if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
|
|
||||||
return item, fmt.Errorf("assign GetMemberByID row: %w", err)
|
|
||||||
}
|
|
||||||
return item, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetMemberByIDBatch implements Querier.GetMemberByIDBatch.
|
|
||||||
func (q *DBQuerier) GetMemberByIDBatch(batch genericBatch, id string) {
|
|
||||||
batch.Queue(getMemberByIDSQL, id)
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetMemberByIDScan implements Querier.GetMemberByIDScan.
|
|
||||||
func (q *DBQuerier) GetMemberByIDScan(results pgx.BatchResults) (GetMemberByIDRow, error) {
|
|
||||||
row := results.QueryRow()
|
|
||||||
var item GetMemberByIDRow
|
|
||||||
namesArray := q.types.newFieldEntryArray()
|
|
||||||
pronounsArray := q.types.newPronounEntryArray()
|
|
||||||
if err := row.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
|
|
||||||
return item, fmt.Errorf("scan GetMemberByIDBatch row: %w", err)
|
|
||||||
}
|
|
||||||
if err := namesArray.AssignTo(&item.Names); err != nil {
|
|
||||||
return item, fmt.Errorf("assign GetMemberByID row: %w", err)
|
|
||||||
}
|
|
||||||
if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
|
|
||||||
return item, fmt.Errorf("assign GetMemberByID row: %w", err)
|
|
||||||
}
|
|
||||||
return item, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
const getMemberByNameSQL = `SELECT * FROM members
|
|
||||||
WHERE user_id = $1 AND (
|
|
||||||
id = $2
|
|
||||||
OR name = $2
|
|
||||||
);`
|
|
||||||
|
|
||||||
type GetMemberByNameRow struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
UserID string `json:"user_id"`
|
|
||||||
Name string `json:"name"`
|
|
||||||
Bio *string `json:"bio"`
|
|
||||||
AvatarUrls []string `json:"avatar_urls"`
|
|
||||||
Links []string `json:"links"`
|
|
||||||
DisplayName *string `json:"display_name"`
|
|
||||||
Names []FieldEntry `json:"names"`
|
|
||||||
Pronouns []PronounEntry `json:"pronouns"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetMemberByName implements Querier.GetMemberByName.
|
|
||||||
func (q *DBQuerier) GetMemberByName(ctx context.Context, userID string, memberRef string) (GetMemberByNameRow, error) {
|
|
||||||
ctx = context.WithValue(ctx, "pggen_query_name", "GetMemberByName")
|
|
||||||
row := q.conn.QueryRow(ctx, getMemberByNameSQL, userID, memberRef)
|
|
||||||
var item GetMemberByNameRow
|
|
||||||
namesArray := q.types.newFieldEntryArray()
|
|
||||||
pronounsArray := q.types.newPronounEntryArray()
|
|
||||||
if err := row.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
|
|
||||||
return item, fmt.Errorf("query GetMemberByName: %w", err)
|
|
||||||
}
|
|
||||||
if err := namesArray.AssignTo(&item.Names); err != nil {
|
|
||||||
return item, fmt.Errorf("assign GetMemberByName row: %w", err)
|
|
||||||
}
|
|
||||||
if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
|
|
||||||
return item, fmt.Errorf("assign GetMemberByName row: %w", err)
|
|
||||||
}
|
|
||||||
return item, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetMemberByNameBatch implements Querier.GetMemberByNameBatch.
|
|
||||||
func (q *DBQuerier) GetMemberByNameBatch(batch genericBatch, userID string, memberRef string) {
|
|
||||||
batch.Queue(getMemberByNameSQL, userID, memberRef)
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetMemberByNameScan implements Querier.GetMemberByNameScan.
|
|
||||||
func (q *DBQuerier) GetMemberByNameScan(results pgx.BatchResults) (GetMemberByNameRow, error) {
|
|
||||||
row := results.QueryRow()
|
|
||||||
var item GetMemberByNameRow
|
|
||||||
namesArray := q.types.newFieldEntryArray()
|
|
||||||
pronounsArray := q.types.newPronounEntryArray()
|
|
||||||
if err := row.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
|
|
||||||
return item, fmt.Errorf("scan GetMemberByNameBatch row: %w", err)
|
|
||||||
}
|
|
||||||
if err := namesArray.AssignTo(&item.Names); err != nil {
|
|
||||||
return item, fmt.Errorf("assign GetMemberByName row: %w", err)
|
|
||||||
}
|
|
||||||
if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
|
|
||||||
return item, fmt.Errorf("assign GetMemberByName row: %w", err)
|
|
||||||
}
|
|
||||||
return item, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
const getMembersSQL = `SELECT * FROM members
|
|
||||||
WHERE user_id = $1
|
|
||||||
ORDER BY name, id;`
|
|
||||||
|
|
||||||
type GetMembersRow struct {
|
|
||||||
ID *string `json:"id"`
|
|
||||||
UserID *string `json:"user_id"`
|
|
||||||
Name *string `json:"name"`
|
|
||||||
Bio *string `json:"bio"`
|
|
||||||
AvatarUrls []string `json:"avatar_urls"`
|
|
||||||
Links []string `json:"links"`
|
|
||||||
DisplayName *string `json:"display_name"`
|
|
||||||
Names []FieldEntry `json:"names"`
|
|
||||||
Pronouns []PronounEntry `json:"pronouns"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetMembers implements Querier.GetMembers.
|
|
||||||
func (q *DBQuerier) GetMembers(ctx context.Context, userID string) ([]GetMembersRow, error) {
|
|
||||||
ctx = context.WithValue(ctx, "pggen_query_name", "GetMembers")
|
|
||||||
rows, err := q.conn.Query(ctx, getMembersSQL, userID)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("query GetMembers: %w", err)
|
|
||||||
}
|
|
||||||
defer rows.Close()
|
|
||||||
items := []GetMembersRow{}
|
|
||||||
namesArray := q.types.newFieldEntryArray()
|
|
||||||
pronounsArray := q.types.newPronounEntryArray()
|
|
||||||
for rows.Next() {
|
|
||||||
var item GetMembersRow
|
|
||||||
if err := rows.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
|
|
||||||
return nil, fmt.Errorf("scan GetMembers row: %w", err)
|
|
||||||
}
|
|
||||||
if err := namesArray.AssignTo(&item.Names); err != nil {
|
|
||||||
return nil, fmt.Errorf("assign GetMembers row: %w", err)
|
|
||||||
}
|
|
||||||
if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
|
|
||||||
return nil, fmt.Errorf("assign GetMembers row: %w", err)
|
|
||||||
}
|
|
||||||
items = append(items, item)
|
|
||||||
}
|
|
||||||
if err := rows.Err(); err != nil {
|
|
||||||
return nil, fmt.Errorf("close GetMembers rows: %w", err)
|
|
||||||
}
|
|
||||||
return items, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetMembersBatch implements Querier.GetMembersBatch.
|
|
||||||
func (q *DBQuerier) GetMembersBatch(batch genericBatch, userID string) {
|
|
||||||
batch.Queue(getMembersSQL, userID)
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetMembersScan implements Querier.GetMembersScan.
func (q *DBQuerier) GetMembersScan(results pgx.BatchResults) ([]GetMembersRow, error) {
	rows, err := results.Query()
	if err != nil {
		return nil, fmt.Errorf("query GetMembersBatch: %w", err)
	}
	defer rows.Close()
	items := []GetMembersRow{}
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	for rows.Next() {
		var item GetMembersRow
		if err := rows.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
			return nil, fmt.Errorf("scan GetMembersBatch row: %w", err)
		}
		if err := namesArray.AssignTo(&item.Names); err != nil {
			return nil, fmt.Errorf("assign GetMembers row: %w", err)
		}
		if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
			return nil, fmt.Errorf("assign GetMembers row: %w", err)
		}
		items = append(items, item)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("close GetMembersBatch rows: %w", err)
	}
	return items, err
}

const updateMemberNamesPronounsSQL = `UPDATE members SET
names = $1,
pronouns = $2
WHERE id = $3
RETURNING *;`

type UpdateMemberNamesPronounsParams struct {
	Names    []FieldEntry
	Pronouns []PronounEntry
	ID       string
}

type UpdateMemberNamesPronounsRow struct {
	ID          string         `json:"id"`
	UserID      string         `json:"user_id"`
	Name        string         `json:"name"`
	Bio         *string        `json:"bio"`
	AvatarUrls  []string       `json:"avatar_urls"`
	Links       []string       `json:"links"`
	DisplayName *string        `json:"display_name"`
	Names       []FieldEntry   `json:"names"`
	Pronouns    []PronounEntry `json:"pronouns"`
}

// UpdateMemberNamesPronouns implements Querier.UpdateMemberNamesPronouns.
func (q *DBQuerier) UpdateMemberNamesPronouns(ctx context.Context, params UpdateMemberNamesPronounsParams) (UpdateMemberNamesPronounsRow, error) {
	ctx = context.WithValue(ctx, "pggen_query_name", "UpdateMemberNamesPronouns")
	row := q.conn.QueryRow(ctx, updateMemberNamesPronounsSQL, q.types.newFieldEntryArrayInit(params.Names), q.types.newPronounEntryArrayInit(params.Pronouns), params.ID)
	var item UpdateMemberNamesPronounsRow
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
		return item, fmt.Errorf("query UpdateMemberNamesPronouns: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign UpdateMemberNamesPronouns row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign UpdateMemberNamesPronouns row: %w", err)
	}
	return item, nil
}

// UpdateMemberNamesPronounsBatch implements Querier.UpdateMemberNamesPronounsBatch.
func (q *DBQuerier) UpdateMemberNamesPronounsBatch(batch genericBatch, params UpdateMemberNamesPronounsParams) {
	batch.Queue(updateMemberNamesPronounsSQL, q.types.newFieldEntryArrayInit(params.Names), q.types.newPronounEntryArrayInit(params.Pronouns), params.ID)
}

// UpdateMemberNamesPronounsScan implements Querier.UpdateMemberNamesPronounsScan.
func (q *DBQuerier) UpdateMemberNamesPronounsScan(results pgx.BatchResults) (UpdateMemberNamesPronounsRow, error) {
	row := results.QueryRow()
	var item UpdateMemberNamesPronounsRow
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
		return item, fmt.Errorf("scan UpdateMemberNamesPronounsBatch row: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign UpdateMemberNamesPronouns row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign UpdateMemberNamesPronouns row: %w", err)
	}
	return item, nil
}

const getMemberFieldsSQL = `SELECT * FROM member_fields WHERE member_id = $1 ORDER BY id ASC;`

type GetMemberFieldsRow struct {
	MemberID *string      `json:"member_id"`
	ID       *int         `json:"id"`
	Name     *string      `json:"name"`
	Entries  []FieldEntry `json:"entries"`
}

// GetMemberFields implements Querier.GetMemberFields.
func (q *DBQuerier) GetMemberFields(ctx context.Context, memberID string) ([]GetMemberFieldsRow, error) {
	ctx = context.WithValue(ctx, "pggen_query_name", "GetMemberFields")
	rows, err := q.conn.Query(ctx, getMemberFieldsSQL, memberID)
	if err != nil {
		return nil, fmt.Errorf("query GetMemberFields: %w", err)
	}
	defer rows.Close()
	items := []GetMemberFieldsRow{}
	entriesArray := q.types.newFieldEntryArray()
	for rows.Next() {
		var item GetMemberFieldsRow
		if err := rows.Scan(&item.MemberID, &item.ID, &item.Name, entriesArray); err != nil {
			return nil, fmt.Errorf("scan GetMemberFields row: %w", err)
		}
		if err := entriesArray.AssignTo(&item.Entries); err != nil {
			return nil, fmt.Errorf("assign GetMemberFields row: %w", err)
		}
		items = append(items, item)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("close GetMemberFields rows: %w", err)
	}
	return items, err
}

// GetMemberFieldsBatch implements Querier.GetMemberFieldsBatch.
func (q *DBQuerier) GetMemberFieldsBatch(batch genericBatch, memberID string) {
	batch.Queue(getMemberFieldsSQL, memberID)
}

// GetMemberFieldsScan implements Querier.GetMemberFieldsScan.
func (q *DBQuerier) GetMemberFieldsScan(results pgx.BatchResults) ([]GetMemberFieldsRow, error) {
	rows, err := results.Query()
	if err != nil {
		return nil, fmt.Errorf("query GetMemberFieldsBatch: %w", err)
	}
	defer rows.Close()
	items := []GetMemberFieldsRow{}
	entriesArray := q.types.newFieldEntryArray()
	for rows.Next() {
		var item GetMemberFieldsRow
		if err := rows.Scan(&item.MemberID, &item.ID, &item.Name, entriesArray); err != nil {
			return nil, fmt.Errorf("scan GetMemberFieldsBatch row: %w", err)
		}
		if err := entriesArray.AssignTo(&item.Entries); err != nil {
			return nil, fmt.Errorf("assign GetMemberFields row: %w", err)
		}
		items = append(items, item)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("close GetMemberFieldsBatch rows: %w", err)
	}
	return items, err
}

const insertMemberFieldSQL = `INSERT INTO member_fields
(member_id, name, entries) VALUES
($1, $2, $3)
RETURNING *;`

type InsertMemberFieldParams struct {
	MemberID string
	Name     string
	Entries  []FieldEntry
}

type InsertMemberFieldRow struct {
	MemberID string       `json:"member_id"`
	ID       int          `json:"id"`
	Name     string       `json:"name"`
	Entries  []FieldEntry `json:"entries"`
}

// InsertMemberField implements Querier.InsertMemberField.
func (q *DBQuerier) InsertMemberField(ctx context.Context, params InsertMemberFieldParams) (InsertMemberFieldRow, error) {
	ctx = context.WithValue(ctx, "pggen_query_name", "InsertMemberField")
	row := q.conn.QueryRow(ctx, insertMemberFieldSQL, params.MemberID, params.Name, q.types.newFieldEntryArrayInit(params.Entries))
	var item InsertMemberFieldRow
	entriesArray := q.types.newFieldEntryArray()
	if err := row.Scan(&item.MemberID, &item.ID, &item.Name, entriesArray); err != nil {
		return item, fmt.Errorf("query InsertMemberField: %w", err)
	}
	if err := entriesArray.AssignTo(&item.Entries); err != nil {
		return item, fmt.Errorf("assign InsertMemberField row: %w", err)
	}
	return item, nil
}

// InsertMemberFieldBatch implements Querier.InsertMemberFieldBatch.
func (q *DBQuerier) InsertMemberFieldBatch(batch genericBatch, params InsertMemberFieldParams) {
	batch.Queue(insertMemberFieldSQL, params.MemberID, params.Name, q.types.newFieldEntryArrayInit(params.Entries))
}

// InsertMemberFieldScan implements Querier.InsertMemberFieldScan.
func (q *DBQuerier) InsertMemberFieldScan(results pgx.BatchResults) (InsertMemberFieldRow, error) {
	row := results.QueryRow()
	var item InsertMemberFieldRow
	entriesArray := q.types.newFieldEntryArray()
	if err := row.Scan(&item.MemberID, &item.ID, &item.Name, entriesArray); err != nil {
		return item, fmt.Errorf("scan InsertMemberFieldBatch row: %w", err)
	}
	if err := entriesArray.AssignTo(&item.Entries); err != nil {
		return item, fmt.Errorf("assign InsertMemberField row: %w", err)
	}
	return item, nil
}

// textPreferrer wraps a pgtype.ValueTranscoder and sets the preferred encoding
// format to text instead of binary (the default). pggen uses the text format
// when the OID is unknownOID because the binary format requires the OID.
// Typically occurs if the results from QueryAllDataTypes aren't passed to
// NewQuerierConfig.
type textPreferrer struct {
	pgtype.ValueTranscoder
	typeName string
}

// PreferredParamFormat implements pgtype.ParamFormatPreferrer.
func (t textPreferrer) PreferredParamFormat() int16 { return pgtype.TextFormatCode }

func (t textPreferrer) NewTypeValue() pgtype.Value {
	return textPreferrer{ValueTranscoder: pgtype.NewValue(t.ValueTranscoder).(pgtype.ValueTranscoder), typeName: t.typeName}
}

func (t textPreferrer) TypeName() string {
	return t.typeName
}

// unknownOID means we don't know the OID for a type. This is okay for decoding
// because pgx calls DecodeText or DecodeBinary without requiring the OID. For
// encoding parameters, pggen uses textPreferrer if the OID is unknown.
const unknownOID = 0
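For contrast with the generated transcoder plumbing above, here is a minimal sketch of how the same member data can be read once names, pronouns, and field entries are stored as jsonb, using scany's pgxscan as the modified code later in this commit does. The Member, FieldEntry, and PronounEntry shapes and the getMember helper are illustrative assumptions, not definitions from this repository:

package db

import (
	"context"

	"github.com/georgysavva/scany/pgxscan"
	"github.com/jackc/pgx/v4/pgxpool"
)

// FieldEntry and PronounEntry mirror the JSON stored in the new jsonb columns.
// These shapes are assumptions for illustration; the real definitions live
// elsewhere in this package.
type FieldEntry struct {
	Value  string `json:"value"`
	Status string `json:"status"`
}

type PronounEntry struct {
	Pronouns string `json:"pronouns"`
	Status   string `json:"status"`
}

// Member is an assumed projection of the members table after the migration.
type Member struct {
	ID          string
	UserID      string
	Name        string
	DisplayName *string
	Bio         *string
	AvatarUrls  []string
	Links       []string
	Names       []FieldEntry   // jsonb, decoded via encoding/json
	Pronouns    []PronounEntry // jsonb, decoded via encoding/json
}

// getMember replaces the per-type transcoders above with a single Get call:
// pgxscan matches columns to fields by snake-cased name, and pgx decodes the
// jsonb columns into the slices.
func getMember(ctx context.Context, pool *pgxpool.Pool, id string) (Member, error) {
	var m Member
	err := pgxscan.Get(ctx, pool, &m,
		`SELECT id, user_id, name, display_name, bio, avatar_urls, links, names, pronouns
		 FROM members WHERE id = $1`, id)
	return m, err
}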
@@ -1,21 +0,0 @@
-- name: GetUserByID :one
SELECT * FROM users WHERE id = pggen.arg('id');

-- name: GetUserByUsername :one
SELECT * FROM users WHERE username = pggen.arg('username');

-- name: UpdateUserNamesPronouns :one
UPDATE users SET
names = pggen.arg('names'),
pronouns = pggen.arg('pronouns')
WHERE id = pggen.arg('id')
RETURNING *;

-- name: GetUserFields :many
SELECT * FROM user_fields WHERE user_id = pggen.arg('user_id') ORDER BY id ASC;

-- name: InsertUserField :one
INSERT INTO user_fields
(user_id, name, entries) VALUES
(pggen.arg('user_id'), pggen.arg('name'), pggen.arg('entries'))
RETURNING *;
@@ -1,320 +0,0 @@
// Code generated by pggen. DO NOT EDIT.

package queries

import (
	"context"
	"fmt"
	"github.com/jackc/pgtype"
	"github.com/jackc/pgx/v4"
)

const getUserByIDSQL = `SELECT * FROM users WHERE id = $1;`

type GetUserByIDRow struct {
	ID              string             `json:"id"`
	Username        string             `json:"username"`
	DisplayName     *string            `json:"display_name"`
	Bio             *string            `json:"bio"`
	AvatarUrls      []string           `json:"avatar_urls"`
	Links           []string           `json:"links"`
	Discord         *string            `json:"discord"`
	DiscordUsername *string            `json:"discord_username"`
	MaxInvites      int32              `json:"max_invites"`
	Names           []FieldEntry       `json:"names"`
	Pronouns        []PronounEntry     `json:"pronouns"`
	DeletedAt       pgtype.Timestamptz `json:"deleted_at"`
	SelfDelete      *bool              `json:"self_delete"`
	DeleteReason    *string            `json:"delete_reason"`
}

// GetUserByID implements Querier.GetUserByID.
func (q *DBQuerier) GetUserByID(ctx context.Context, id string) (GetUserByIDRow, error) {
	ctx = context.WithValue(ctx, "pggen_query_name", "GetUserByID")
	row := q.conn.QueryRow(ctx, getUserByIDSQL, id)
	var item GetUserByIDRow
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.Username, &item.DisplayName, &item.Bio, &item.AvatarUrls, &item.Links, &item.Discord, &item.DiscordUsername, &item.MaxInvites, namesArray, pronounsArray, &item.DeletedAt, &item.SelfDelete, &item.DeleteReason); err != nil {
		return item, fmt.Errorf("query GetUserByID: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign GetUserByID row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign GetUserByID row: %w", err)
	}
	return item, nil
}

// GetUserByIDBatch implements Querier.GetUserByIDBatch.
func (q *DBQuerier) GetUserByIDBatch(batch genericBatch, id string) {
	batch.Queue(getUserByIDSQL, id)
}

// GetUserByIDScan implements Querier.GetUserByIDScan.
func (q *DBQuerier) GetUserByIDScan(results pgx.BatchResults) (GetUserByIDRow, error) {
	row := results.QueryRow()
	var item GetUserByIDRow
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.Username, &item.DisplayName, &item.Bio, &item.AvatarUrls, &item.Links, &item.Discord, &item.DiscordUsername, &item.MaxInvites, namesArray, pronounsArray, &item.DeletedAt, &item.SelfDelete, &item.DeleteReason); err != nil {
		return item, fmt.Errorf("scan GetUserByIDBatch row: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign GetUserByID row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign GetUserByID row: %w", err)
	}
	return item, nil
}

const getUserByUsernameSQL = `SELECT * FROM users WHERE username = $1;`

type GetUserByUsernameRow struct {
	ID              string             `json:"id"`
	Username        string             `json:"username"`
	DisplayName     *string            `json:"display_name"`
	Bio             *string            `json:"bio"`
	AvatarUrls      []string           `json:"avatar_urls"`
	Links           []string           `json:"links"`
	Discord         *string            `json:"discord"`
	DiscordUsername *string            `json:"discord_username"`
	MaxInvites      int32              `json:"max_invites"`
	Names           []FieldEntry       `json:"names"`
	Pronouns        []PronounEntry     `json:"pronouns"`
	DeletedAt       pgtype.Timestamptz `json:"deleted_at"`
	SelfDelete      *bool              `json:"self_delete"`
	DeleteReason    *string            `json:"delete_reason"`
}

// GetUserByUsername implements Querier.GetUserByUsername.
func (q *DBQuerier) GetUserByUsername(ctx context.Context, username string) (GetUserByUsernameRow, error) {
	ctx = context.WithValue(ctx, "pggen_query_name", "GetUserByUsername")
	row := q.conn.QueryRow(ctx, getUserByUsernameSQL, username)
	var item GetUserByUsernameRow
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.Username, &item.DisplayName, &item.Bio, &item.AvatarUrls, &item.Links, &item.Discord, &item.DiscordUsername, &item.MaxInvites, namesArray, pronounsArray, &item.DeletedAt, &item.SelfDelete, &item.DeleteReason); err != nil {
		return item, fmt.Errorf("query GetUserByUsername: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign GetUserByUsername row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign GetUserByUsername row: %w", err)
	}
	return item, nil
}

// GetUserByUsernameBatch implements Querier.GetUserByUsernameBatch.
func (q *DBQuerier) GetUserByUsernameBatch(batch genericBatch, username string) {
	batch.Queue(getUserByUsernameSQL, username)
}

// GetUserByUsernameScan implements Querier.GetUserByUsernameScan.
func (q *DBQuerier) GetUserByUsernameScan(results pgx.BatchResults) (GetUserByUsernameRow, error) {
	row := results.QueryRow()
	var item GetUserByUsernameRow
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.Username, &item.DisplayName, &item.Bio, &item.AvatarUrls, &item.Links, &item.Discord, &item.DiscordUsername, &item.MaxInvites, namesArray, pronounsArray, &item.DeletedAt, &item.SelfDelete, &item.DeleteReason); err != nil {
		return item, fmt.Errorf("scan GetUserByUsernameBatch row: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign GetUserByUsername row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign GetUserByUsername row: %w", err)
	}
	return item, nil
}

const updateUserNamesPronounsSQL = `UPDATE users SET
names = $1,
pronouns = $2
WHERE id = $3
RETURNING *;`

type UpdateUserNamesPronounsParams struct {
	Names    []FieldEntry
	Pronouns []PronounEntry
	ID       string
}

type UpdateUserNamesPronounsRow struct {
	ID              string             `json:"id"`
	Username        string             `json:"username"`
	DisplayName     *string            `json:"display_name"`
	Bio             *string            `json:"bio"`
	AvatarUrls      []string           `json:"avatar_urls"`
	Links           []string           `json:"links"`
	Discord         *string            `json:"discord"`
	DiscordUsername *string            `json:"discord_username"`
	MaxInvites      int32              `json:"max_invites"`
	Names           []FieldEntry       `json:"names"`
	Pronouns        []PronounEntry     `json:"pronouns"`
	DeletedAt       pgtype.Timestamptz `json:"deleted_at"`
	SelfDelete      *bool              `json:"self_delete"`
	DeleteReason    *string            `json:"delete_reason"`
}

// UpdateUserNamesPronouns implements Querier.UpdateUserNamesPronouns.
func (q *DBQuerier) UpdateUserNamesPronouns(ctx context.Context, params UpdateUserNamesPronounsParams) (UpdateUserNamesPronounsRow, error) {
	ctx = context.WithValue(ctx, "pggen_query_name", "UpdateUserNamesPronouns")
	row := q.conn.QueryRow(ctx, updateUserNamesPronounsSQL, q.types.newFieldEntryArrayInit(params.Names), q.types.newPronounEntryArrayInit(params.Pronouns), params.ID)
	var item UpdateUserNamesPronounsRow
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.Username, &item.DisplayName, &item.Bio, &item.AvatarUrls, &item.Links, &item.Discord, &item.DiscordUsername, &item.MaxInvites, namesArray, pronounsArray, &item.DeletedAt, &item.SelfDelete, &item.DeleteReason); err != nil {
		return item, fmt.Errorf("query UpdateUserNamesPronouns: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign UpdateUserNamesPronouns row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign UpdateUserNamesPronouns row: %w", err)
	}
	return item, nil
}

// UpdateUserNamesPronounsBatch implements Querier.UpdateUserNamesPronounsBatch.
func (q *DBQuerier) UpdateUserNamesPronounsBatch(batch genericBatch, params UpdateUserNamesPronounsParams) {
	batch.Queue(updateUserNamesPronounsSQL, q.types.newFieldEntryArrayInit(params.Names), q.types.newPronounEntryArrayInit(params.Pronouns), params.ID)
}

// UpdateUserNamesPronounsScan implements Querier.UpdateUserNamesPronounsScan.
func (q *DBQuerier) UpdateUserNamesPronounsScan(results pgx.BatchResults) (UpdateUserNamesPronounsRow, error) {
	row := results.QueryRow()
	var item UpdateUserNamesPronounsRow
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.Username, &item.DisplayName, &item.Bio, &item.AvatarUrls, &item.Links, &item.Discord, &item.DiscordUsername, &item.MaxInvites, namesArray, pronounsArray, &item.DeletedAt, &item.SelfDelete, &item.DeleteReason); err != nil {
		return item, fmt.Errorf("scan UpdateUserNamesPronounsBatch row: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign UpdateUserNamesPronouns row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign UpdateUserNamesPronouns row: %w", err)
	}
	return item, nil
}

const getUserFieldsSQL = `SELECT * FROM user_fields WHERE user_id = $1 ORDER BY id ASC;`

type GetUserFieldsRow struct {
	UserID  *string      `json:"user_id"`
	ID      *int         `json:"id"`
	Name    *string      `json:"name"`
	Entries []FieldEntry `json:"entries"`
}

// GetUserFields implements Querier.GetUserFields.
func (q *DBQuerier) GetUserFields(ctx context.Context, userID string) ([]GetUserFieldsRow, error) {
	ctx = context.WithValue(ctx, "pggen_query_name", "GetUserFields")
	rows, err := q.conn.Query(ctx, getUserFieldsSQL, userID)
	if err != nil {
		return nil, fmt.Errorf("query GetUserFields: %w", err)
	}
	defer rows.Close()
	items := []GetUserFieldsRow{}
	entriesArray := q.types.newFieldEntryArray()
	for rows.Next() {
		var item GetUserFieldsRow
		if err := rows.Scan(&item.UserID, &item.ID, &item.Name, entriesArray); err != nil {
			return nil, fmt.Errorf("scan GetUserFields row: %w", err)
		}
		if err := entriesArray.AssignTo(&item.Entries); err != nil {
			return nil, fmt.Errorf("assign GetUserFields row: %w", err)
		}
		items = append(items, item)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("close GetUserFields rows: %w", err)
	}
	return items, err
}

// GetUserFieldsBatch implements Querier.GetUserFieldsBatch.
func (q *DBQuerier) GetUserFieldsBatch(batch genericBatch, userID string) {
	batch.Queue(getUserFieldsSQL, userID)
}

// GetUserFieldsScan implements Querier.GetUserFieldsScan.
func (q *DBQuerier) GetUserFieldsScan(results pgx.BatchResults) ([]GetUserFieldsRow, error) {
	rows, err := results.Query()
	if err != nil {
		return nil, fmt.Errorf("query GetUserFieldsBatch: %w", err)
	}
	defer rows.Close()
	items := []GetUserFieldsRow{}
	entriesArray := q.types.newFieldEntryArray()
	for rows.Next() {
		var item GetUserFieldsRow
		if err := rows.Scan(&item.UserID, &item.ID, &item.Name, entriesArray); err != nil {
			return nil, fmt.Errorf("scan GetUserFieldsBatch row: %w", err)
		}
		if err := entriesArray.AssignTo(&item.Entries); err != nil {
			return nil, fmt.Errorf("assign GetUserFields row: %w", err)
		}
		items = append(items, item)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("close GetUserFieldsBatch rows: %w", err)
	}
	return items, err
}

const insertUserFieldSQL = `INSERT INTO user_fields
(user_id, name, entries) VALUES
($1, $2, $3)
RETURNING *;`

type InsertUserFieldParams struct {
	UserID  string
	Name    string
	Entries []FieldEntry
}

type InsertUserFieldRow struct {
	UserID  string       `json:"user_id"`
	ID      int          `json:"id"`
	Name    string       `json:"name"`
	Entries []FieldEntry `json:"entries"`
}

// InsertUserField implements Querier.InsertUserField.
func (q *DBQuerier) InsertUserField(ctx context.Context, params InsertUserFieldParams) (InsertUserFieldRow, error) {
	ctx = context.WithValue(ctx, "pggen_query_name", "InsertUserField")
	row := q.conn.QueryRow(ctx, insertUserFieldSQL, params.UserID, params.Name, q.types.newFieldEntryArrayInit(params.Entries))
	var item InsertUserFieldRow
	entriesArray := q.types.newFieldEntryArray()
	if err := row.Scan(&item.UserID, &item.ID, &item.Name, entriesArray); err != nil {
		return item, fmt.Errorf("query InsertUserField: %w", err)
	}
	if err := entriesArray.AssignTo(&item.Entries); err != nil {
		return item, fmt.Errorf("assign InsertUserField row: %w", err)
	}
	return item, nil
}

// InsertUserFieldBatch implements Querier.InsertUserFieldBatch.
func (q *DBQuerier) InsertUserFieldBatch(batch genericBatch, params InsertUserFieldParams) {
	batch.Queue(insertUserFieldSQL, params.UserID, params.Name, q.types.newFieldEntryArrayInit(params.Entries))
}

// InsertUserFieldScan implements Querier.InsertUserFieldScan.
func (q *DBQuerier) InsertUserFieldScan(results pgx.BatchResults) (InsertUserFieldRow, error) {
	row := results.QueryRow()
	var item InsertUserFieldRow
	entriesArray := q.types.newFieldEntryArray()
	if err := row.Scan(&item.UserID, &item.ID, &item.Name, entriesArray); err != nil {
		return item, fmt.Errorf("scan InsertUserFieldBatch row: %w", err)
	}
	if err := entriesArray.AssignTo(&item.Entries); err != nil {
		return item, fmt.Errorf("assign InsertUserField row: %w", err)
	}
	return item, nil
}
@@ -97,13 +97,13 @@ func (db *DB) InvalidateToken(ctx context.Context, userID xid.ID, tokenID xid.ID
 	return t, nil
 }
 
-func (db *DB) InvalidateAllTokens(ctx context.Context, q querier, userID xid.ID) error {
+func (db *DB) InvalidateAllTokens(ctx context.Context, tx pgx.Tx, userID xid.ID) error {
 	sql, args, err := sq.Update("tokens").Where("user_id = ?", userID).Set("invalidated", true).ToSql()
 	if err != nil {
 		return errors.Wrap(err, "building sql")
 	}
 
-	_, err = q.Exec(ctx, sql, args...)
+	_, err = tx.Exec(ctx, sql, args...)
 	if err != nil {
 		return errors.Wrap(err, "executing query")
 	}
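A minimal usage sketch of the new signature: InvalidateAllTokens now takes the pgx.Tx that wraps the rest of the operation, so the invalidation commits or rolls back with it. The resetTokens helper is hypothetical, and it assumes *DB embeds *pgxpool.Pool (so it exposes Begin), which the pgxscan calls elsewhere in this commit suggest:

package db

import (
	"context"

	"github.com/rs/xid"
)

// resetTokens is a hypothetical caller, not part of this commit.
func (db *DB) resetTokens(ctx context.Context, userID xid.ID) error {
	tx, err := db.Begin(ctx) // assumes *DB embeds *pgxpool.Pool
	if err != nil {
		return err
	}
	defer tx.Rollback(ctx)

	if err := db.InvalidateAllTokens(ctx, tx, userID); err != nil {
		return err
	}
	return tx.Commit(ctx)
}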
@@ -5,11 +5,10 @@ import (
 	"regexp"
 	"time"
 
-	"codeberg.org/u1f320/pronouns.cc/backend/db/queries"
 	"emperror.dev/errors"
 	"github.com/bwmarrin/discordgo"
+	"github.com/georgysavva/scany/pgxscan"
 	"github.com/jackc/pgconn"
-	"github.com/jackc/pgtype"
 	"github.com/jackc/pgx/v4"
 	"github.com/rs/xid"
 )
@@ -70,13 +69,12 @@ func (db *DB) CreateUser(ctx context.Context, tx pgx.Tx, username string) (u Use
 		return u, ErrInvalidUsername
 	}
 
-	sql, args, err := sq.Insert("users").Columns("id", "username").Values(xid.New(), username).Suffix("RETURNING id").ToSql()
+	sql, args, err := sq.Insert("users").Columns("id", "username").Values(xid.New(), username).Suffix("RETURNING *").ToSql()
 	if err != nil {
 		return u, errors.Wrap(err, "building sql")
 	}
 
-	var id xid.ID
-	err = tx.QueryRow(ctx, sql, args...).Scan(&id)
+	err = pgxscan.Get(ctx, tx, &u, sql, args...)
 	if err != nil {
 		pge := &pgconn.PgError{}
 		if errors.As(err, &pge) {
@@ -88,31 +86,28 @@ func (db *DB) CreateUser(ctx context.Context, tx pgx.Tx, username string) (u Use
 
 		return u, errors.Cause(err)
 	}
 
-	return db.getUser(ctx, tx, id)
+	return u, nil
 }
 
 // DiscordUser fetches a user by Discord user ID.
 func (db *DB) DiscordUser(ctx context.Context, discordID string) (u User, err error) {
-	sql, args, err := sq.Select("id").From("users").Where("discord = ?", discordID).ToSql()
+	sql, args, err := sq.Select("*").From("users").Where("discord = ?", discordID).ToSql()
 	if err != nil {
 		return u, errors.Wrap(err, "building sql")
 	}
 
-	var id xid.ID
-	err = db.QueryRow(ctx, sql, args...).Scan(&id)
+	err = pgxscan.Get(ctx, db, &u, sql, args...)
 	if err != nil {
 		if errors.Cause(err) == pgx.ErrNoRows {
 			return u, ErrUserNotFound
 		}
 
-		return u, errors.Wrap(err, "executing id query")
+		return u, errors.Wrap(err, "executing query")
 	}
 
-	return db.getUser(ctx, db, id)
+	return u, nil
 }
 
-func (u *User) UpdateFromDiscord(ctx context.Context, db querier, du *discordgo.User) error {
+func (u *User) UpdateFromDiscord(ctx context.Context, ex Execer, du *discordgo.User) error {
 	sql, args, err := sq.Update("users").
 		Set("discord", du.ID).
 		Set("discord_username", du.String()).
@@ -122,7 +117,7 @@ func (u *User) UpdateFromDiscord(ctx context.Context, db querier, du *discordgo.
 		return errors.Wrap(err, "building sql")
 	}
 
-	_, err = db.Exec(ctx, sql, args...)
+	_, err = ex.Exec(ctx, sql, args...)
 	if err != nil {
 		return errors.Wrap(err, "executing query")
 	}
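Illustrative compile-time assertions (not in the repository) showing why the Exec-only Execer parameter works with either connection kind: both the pool and a transaction provide Exec with the required signature, so UpdateFromDiscord can run standalone or inside a larger transaction.

package db

import (
	"github.com/jackc/pgx/v4"
	"github.com/jackc/pgx/v4/pgxpool"
)

// Both assignments only compile because the types implement Execer.
var (
	_ Execer = (*pgxpool.Pool)(nil)
	_ Execer = (pgx.Tx)(nil)
)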
@@ -134,49 +129,14 @@ func (u *User) UpdateFromDiscord(ctx context.Context, db querier, du *discordgo.
 	return nil
 }
 
-func (db *DB) getUser(ctx context.Context, q querier, id xid.ID) (u User, err error) {
-	qu, err := queries.NewQuerier(q).GetUserByID(ctx, id.String())
-	if err != nil {
-		if errors.Cause(err) == pgx.ErrNoRows {
-			return u, ErrUserNotFound
-		}
-
-		return u, errors.Wrap(err, "getting user from database")
-	}
-
-	var deletedAt *time.Time
-	if qu.DeletedAt.Status == pgtype.Present {
-		deletedAt = &qu.DeletedAt.Time
-	}
-
-	u = User{
-		ID:              id,
-		Username:        qu.Username,
-		DisplayName:     qu.DisplayName,
-		Bio:             qu.Bio,
-		AvatarURLs:      qu.AvatarUrls,
-		Names:           fieldEntriesFromDB(qu.Names),
-		Pronouns:        pronounsFromDB(qu.Pronouns),
-		Links:           qu.Links,
-		Discord:         qu.Discord,
-		DiscordUsername: qu.DiscordUsername,
-		MaxInvites:      int(qu.MaxInvites),
-		DeletedAt:       deletedAt,
-		SelfDelete:      qu.SelfDelete,
-		DeleteReason:    qu.DeleteReason,
-	}
-
-	return u, nil
-}
-
 // User gets a user by ID.
 func (db *DB) User(ctx context.Context, id xid.ID) (u User, err error) {
-	return db.getUser(ctx, db, id)
-}
-
-// Username gets a user by username.
-func (db *DB) Username(ctx context.Context, name string) (u User, err error) {
-	qu, err := db.q.GetUserByUsername(ctx, name)
+	sql, args, err := sq.Select("*").From("users").Where("id = ?", id).ToSql()
+	if err != nil {
+		return u, errors.Wrap(err, "building sql")
+	}
+
+	err = pgxscan.Get(ctx, db, &u, sql, args...)
 	if err != nil {
 		if errors.Cause(err) == pgx.ErrNoRows {
 			return u, ErrUserNotFound
@@ -185,23 +145,23 @@ func (db *DB) Username(ctx context.Context, name string) (u User, err error) {
 		return u, errors.Wrap(err, "getting user from db")
 	}
 
-	id, err := xid.FromString(qu.ID)
-	if err != nil {
-		return u, errors.Wrap(err, "parsing ID")
-	}
-
-	u = User{
-		ID:              id,
-		Username:        qu.Username,
-		DisplayName:     qu.DisplayName,
-		Bio:             qu.Bio,
-		AvatarURLs:      qu.AvatarUrls,
-		Names:           fieldEntriesFromDB(qu.Names),
-		Pronouns:        pronounsFromDB(qu.Pronouns),
-		Links:           qu.Links,
-		Discord:         qu.Discord,
-		DiscordUsername: qu.DiscordUsername,
-		MaxInvites:      int(qu.MaxInvites),
-	}
+	return u, nil
+}
+
+// Username gets a user by username.
+func (db *DB) Username(ctx context.Context, name string) (u User, err error) {
+	sql, args, err := sq.Select("*").From("users").Where("username = ?", name).ToSql()
+	if err != nil {
+		return u, errors.Wrap(err, "building sql")
+	}
+
+	err = pgxscan.Get(ctx, db, &u, sql, args...)
+	if err != nil {
+		if errors.Cause(err) == pgx.ErrNoRows {
+			return u, ErrUserNotFound
+		}
+
+		return u, errors.Wrap(err, "getting user from db")
+	}
 
 	return u, nil
 }
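The hand-built User{...} mapping removed above is roughly what pgxscan.Get now does by reflection: columns are matched to struct fields by snake-cased name or a db tag, and pgx decodes the jsonb names/pronouns columns through encoding/json. The userRow struct below is an assumed, illustrative shape, not the repository's actual User model:

package db

import "time"

// userRow is illustrative only; field types are assumptions.
type userRow struct {
	ID              string         `db:"id"`
	Username        string         `db:"username"`
	DisplayName     *string        `db:"display_name"`
	Bio             *string        `db:"bio"`
	AvatarURLs      []string       `db:"avatar_urls"`
	Links           []string       `db:"links"`
	Names           []FieldEntry   `db:"names"`    // jsonb
	Pronouns        []PronounEntry `db:"pronouns"` // jsonb
	Discord         *string        `db:"discord"`
	DiscordUsername *string        `db:"discord_username"`
	MaxInvites      int            `db:"max_invites"`
	DeletedAt       *time.Time     `db:"deleted_at"`
	SelfDelete      *bool          `db:"self_delete"`
	DeleteReason    *string        `db:"delete_reason"`
}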
@@ -251,10 +211,20 @@ func (db *DB) UpdateUser(
 	avatarURLs []string,
 ) (u User, err error) {
 	if displayName == nil && bio == nil && links == nil && avatarURLs == nil {
-		return db.getUser(ctx, tx, id)
+		sql, args, err := sq.Select("*").From("users").Where("id = ?", id).ToSql()
+		if err != nil {
+			return u, errors.Wrap(err, "building sql")
+		}
+
+		err = pgxscan.Get(ctx, db, &u, sql, args...)
+		if err != nil {
+			return u, errors.Wrap(err, "getting user from db")
+		}
+
+		return u, nil
 	}
 
-	builder := sq.Update("users").Where("id = ?", id)
+	builder := sq.Update("users").Where("id = ?", id).Suffix("RETURNING *")
 	if displayName != nil {
 		if *displayName == "" {
 			builder = builder.Set("display_name", nil)
@@ -270,39 +240,26 @@ func (db *DB) UpdateUser(
 		}
 	}
 	if links != nil {
-		if len(*links) == 0 {
-			builder = builder.Set("links", nil)
-		} else {
-			builder = builder.Set("links", *links)
-		}
+		builder = builder.Set("links", *links)
 	}
 
 	if avatarURLs != nil {
-		if len(avatarURLs) == 0 {
-			builder = builder.Set("avatar_urls", nil)
-		} else {
-			builder = builder.Set("avatar_urls", avatarURLs)
-		}
+		builder = builder.Set("avatar_urls", avatarURLs)
 	}
 
 	sql, args, err := builder.ToSql()
 	if err != nil {
 		return u, errors.Wrap(err, "building sql")
 	}
 
-	_, err = tx.Exec(ctx, sql, args...)
+	err = pgxscan.Get(ctx, tx, &u, sql, args...)
 	if err != nil {
 		return u, errors.Wrap(err, "executing sql")
 	}
 
-	u, err = db.getUser(ctx, tx, id)
-	if err != nil {
-		return u, errors.Wrap(err, "getting updated user")
-	}
 	return u, nil
 }
 
-func (db *DB) DeleteUser(ctx context.Context, q querier, id xid.ID, selfDelete bool, reason string) error {
+func (db *DB) DeleteUser(ctx context.Context, tx pgx.Tx, id xid.ID, selfDelete bool, reason string) error {
 	builder := sq.Update("users").Set("deleted_at", time.Now().UTC()).Set("self_delete", selfDelete).Where("id = ?", id)
 	if !selfDelete {
 		builder = builder.Set("delete_reason", reason)
@@ -312,7 +269,7 @@ func (db *DB) DeleteUser(ctx context.Context, q querier, id xid.ID, selfDelete b
 		return errors.Wrap(err, "building sql")
 	}
 
-	_, err = q.Exec(ctx, sql, args...)
+	_, err = tx.Exec(ctx, sql, args...)
 	if err != nil {
 		return errors.Wrap(err, "executing query")
 	}
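Because the update builder now ends in Suffix("RETURNING *"), a single pgxscan.Get both executes the UPDATE and scans the updated row, replacing the removed follow-up getUser call. A hedged sketch of that pattern in isolation; the updateBio helper and the chosen column are illustrative, not code from this repository:

package db

import (
	"context"

	"github.com/georgysavva/scany/pgxscan"
	"github.com/jackc/pgx/v4"
)

// updateBio runs UPDATE ... RETURNING * and scans the returned row into u in
// one round trip, so no separate SELECT is needed afterwards.
func updateBio(ctx context.Context, tx pgx.Tx, id string, bio string) (u User, err error) {
	sql, args, err := sq.Update("users").Set("bio", bio).Where("id = ?", id).Suffix("RETURNING *").ToSql()
	if err != nil {
		return u, err
	}
	err = pgxscan.Get(ctx, tx, &u, sql, args...)
	return u, err
}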
2  go.mod
@@ -13,7 +13,6 @@ require (
 	github.com/gobwas/glob v0.2.3
 	github.com/golang-jwt/jwt/v4 v4.4.1
 	github.com/jackc/pgconn v1.12.0
-	github.com/jackc/pgtype v1.11.0
 	github.com/jackc/pgx/v4 v4.16.0
 	github.com/joho/godotenv v1.4.0
 	github.com/mediocregopher/radix/v4 v4.1.0
@@ -36,6 +35,7 @@ require (
 	github.com/jackc/pgpassfile v1.0.0 // indirect
 	github.com/jackc/pgproto3/v2 v2.3.0 // indirect
 	github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect
+	github.com/jackc/pgtype v1.11.0 // indirect
 	github.com/jackc/puddle v1.2.1 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
 	github.com/klauspost/compress v1.15.9 // indirect
24  scripts/migrate/006_jsonb_arrays.sql  Normal file
@@ -0,0 +1,24 @@
-- +migrate Up

-- 2023-03-11: Change composite type arrays to use jsonb columns
-- Composite types aren't actually supported by pgx and this allows us to drop pggen as a dev dependency.

-- Delete old columns
alter table users drop column names;
alter table users drop column pronouns;

alter table members drop column names;
alter table members drop column pronouns;

alter table user_fields drop column entries;
alter table member_fields drop column entries;

-- Create new columns
alter table users add column names jsonb not null default '[]';
alter table users add column pronouns jsonb not null default '[]';

alter table members add column names jsonb not null default '[]';
alter table members add column pronouns jsonb not null default '[]';

alter table user_fields add column entries jsonb not null default '[]';
alter table member_fields add column entries jsonb not null default '[]';
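With the columns re-created as jsonb, writing the Go slices back also needs no custom transcoders: the value can be marshalled to JSON (or, with pgx's jsonb codec, passed directly) as an ordinary query parameter. A hedged sketch with a hypothetical helper name, using the column names from the migration above:

package db

import (
	"context"
	"encoding/json"

	"github.com/jackc/pgx/v4"
)

// setMemberNames stores a []FieldEntry in the members.names jsonb column.
// Marshalling explicitly keeps the behaviour obvious and independent of the
// driver's parameter type resolution.
func setMemberNames(ctx context.Context, tx pgx.Tx, memberID string, names []FieldEntry) error {
	b, err := json.Marshal(names)
	if err != nil {
		return err
	}
	_, err = tx.Exec(ctx, "UPDATE members SET names = $1 WHERE id = $2", b, memberID)
	return err
}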