feat: create initial database schema
parent 00bd014cdd
commit 875a6d1a33

11 changed files with 155 additions and 0 deletions
internal/db/db.go (new file, +32)
@@ -0,0 +1,32 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.25.0

package db

import (
    "context"

    "github.com/jackc/pgx/v5"
    "github.com/jackc/pgx/v5/pgconn"
)

type DBTX interface {
    Exec(context.Context, string, ...interface{}) (pgconn.CommandTag, error)
    Query(context.Context, string, ...interface{}) (pgx.Rows, error)
    QueryRow(context.Context, string, ...interface{}) pgx.Row
}

func New(db DBTX) *Queries {
    return &Queries{db: db}
}

type Queries struct {
    db DBTX
}

func (q *Queries) WithTx(tx pgx.Tx) *Queries {
    return &Queries{
        db: tx,
    }
}
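For context, a minimal wiring sketch (not generated by sqlc, and assuming a pgxpool-backed pool): *pgxpool.Pool satisfies the DBTX interface above, so it can be passed straight to New. The module path and DSN are placeholders.

// Wiring sketch (assumption, not part of this commit):
// *pgxpool.Pool implements DBTX, so it can back the generated Queries.
package main

import (
    "context"
    "log"

    "github.com/jackc/pgx/v5/pgxpool"

    "example.com/shortener/internal/db" // hypothetical module path
)

func main() {
    ctx := context.Background()

    // Placeholder DSN; in real use this comes from configuration.
    pool, err := pgxpool.New(ctx, "postgres://user:pass@localhost:5432/shortener")
    if err != nil {
        log.Fatal(err)
    }
    defer pool.Close()

    queries := db.New(pool)
    _ = queries // hand this to handlers/services from here
}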
internal/db/models.go (new file, +13)
@@ -0,0 +1,13 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.25.0

package db

import ()

type Url struct {
    ID      int32
    Hash    string
    LongUrl string
}
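The Url model corresponds to a urls table. Below is a sketch of the schema it implies, applied through pgx; the DDL is an assumption inferred from the model and the GetURLByHash query, since the schema file itself is not shown here. Package and function names are illustrative.

// Presumed initial schema (assumption inferred from the Url model;
// the actual schema file is not shown in this excerpt).
package dbinit

import (
    "context"

    "github.com/jackc/pgx/v5/pgxpool"
)

const createURLsTable = `
CREATE TABLE IF NOT EXISTS urls (
    id       SERIAL PRIMARY KEY,
    hash     TEXT NOT NULL UNIQUE,
    long_url TEXT NOT NULL
);`

// Migrate applies the presumed schema: id maps to Url.ID (int32),
// hash to Url.Hash, long_url to Url.LongUrl.
func Migrate(ctx context.Context, pool *pgxpool.Pool) error {
    _, err := pool.Exec(ctx, createURLsTable)
    return err
}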
internal/db/query.sql.go (new file, +22)
@@ -0,0 +1,22 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.25.0
// source: query.sql

package db

import (
    "context"
)

const getURLByHash = `-- name: GetURLByHash :one
SELECT id, hash, long_url FROM urls
WHERE hash = $1
`

func (q *Queries) GetURLByHash(ctx context.Context, hash string) (Url, error) {
    row := q.db.QueryRow(ctx, getURLByHash, hash)
    var i Url
    err := row.Scan(&i.ID, &i.Hash, &i.LongUrl)
    return i, err
}
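A short usage sketch for the generated query, assuming a *db.Queries built as in the wiring example above: GetURLByHash surfaces pgx.ErrNoRows when no row matches, which callers typically translate into a not-found result. The package, module path, and function name are illustrative.

// Usage sketch (assumption): resolving a hash with the generated query.
package shortener

import (
    "context"
    "errors"
    "fmt"

    "github.com/jackc/pgx/v5"

    "example.com/shortener/internal/db" // hypothetical module path
)

// Resolve returns the long URL stored for hash, distinguishing
// "not found" from other failures. q may be pool-backed or, via
// WithTx, transaction-backed.
func Resolve(ctx context.Context, q *db.Queries, hash string) (string, error) {
    u, err := q.GetURLByHash(ctx, hash)
    if errors.Is(err, pgx.ErrNoRows) {
        return "", fmt.Errorf("hash %q not found", hash)
    }
    if err != nil {
        return "", err
    }
    return u.LongUrl, nil
}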