feat: odd and event history
commit c08b786803
parent 3fb3da6cc8

.vscode/settings.json (vendored) | 3

@@ -1,8 +1,11 @@
 {
   "cSpell.words": [
     "Cashout",
+    "dbgen",
+    "jackc",
     "narg",
     "notificationservice",
+    "pgtype",
     "sqlc"
   ],
   "cSpell.enabledFileTypes": {

@@ -110,7 +110,7 @@ func main() {
     authSvc := authentication.NewService(store, store, cfg.RefreshExpiry)
     userSvc := user.NewService(store, store, messengerSvc, cfg)
     eventSvc := event.New(cfg.Bet365Token, store, domain.MongoDBLogger)
-    oddsSvc := odds.New(store, cfg, logger, domain.MongoDBLogger)
+    oddsSvc := odds.New(store, cfg, eventSvc, logger, domain.MongoDBLogger)
     notificationRepo := repository.NewNotificationRepository(store)
     virtuaGamesRepo := repository.NewVirtualGameRepository(store)
     notificationSvc := notificationservice.New(notificationRepo, domain.MongoDBLogger, logger, cfg, messengerSvc, userSvc)

@@ -264,11 +264,17 @@ CREATE TABLE events (
     fetched_at TIMESTAMP DEFAULT now(),
     source TEXT DEFAULT 'b365api',
     is_featured BOOLEAN NOT NULL DEFAULT FALSE,
-    is_monitorred BOOLEAN NOT NULL DEFAULT FALSE,
+    is_monitored BOOLEAN NOT NULL DEFAULT FALSE,
     is_active BOOLEAN NOT NULL DEFAULT TRUE
 );
+CREATE TABLE event_history (
+    id BIGSERIAL PRIMARY KEY,
+    event_id TEXT NOT NULL,
+    status TEXT NOT NULL,
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
 CREATE TABLE odds (
-    id SERIAL PRIMARY KEY,
+    id BIGSERIAL PRIMARY KEY,
     event_id TEXT,
     fi TEXT,
     market_type TEXT NOT NULL,

@@ -288,6 +294,15 @@ CREATE TABLE odds (
     UNIQUE (event_id, market_id, name, handicap),
     UNIQUE (event_id, market_id)
 );
+CREATE TABLE odd_history (
+    id BIGSERIAL PRIMARY KEY,
+    odd_id BIGINT NOT NULL,
+    raw_odd_id BIGINT NOT NULL,
+    market_id TEXT NOT NULL,
+    event_id TEXT NOT NULL,
+    odd_value DOUBLE PRECISION NOT NULL,
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
 CREATE TABLE result_log (
     id BIGSERIAL PRIMARY KEY,
     status_not_finished_count INT NOT NULL,

db/query/event_history.sql (new file) | 36

@@ -0,0 +1,36 @@
+-- name: InsertEventHistory :one
+INSERT INTO event_history (event_id, status)
+VALUES ($1, $2)
+RETURNING *;
+-- name: GetAllEventHistory :many
+SELECT *
+FROM event_history
+WHERE (
+    event_id = sqlc.narg('event_id')
+    OR sqlc.narg('event_id') IS NULL
+)
+AND (
+    created_at > sqlc.narg('created_before')
+    OR sqlc.narg('created_before') IS NULL
+)
+AND (
+    created_at < sqlc.narg('created_after')
+    OR sqlc.narg('created_after') IS NULL
+);
+-- name: GetInitialEventPerDay :many
+SELECT DISTINCT ON (DATE_TRUNC('day', created_at)) *
+FROM event_history
+WHERE (
+    event_id = sqlc.narg('event_id')
+    OR sqlc.narg('event_id') IS NULL
+)
+AND (
+    created_at > sqlc.narg('created_before')
+    OR sqlc.narg('created_before') IS NULL
+)
+AND (
+    created_at < sqlc.narg('created_after')
+    OR sqlc.narg('created_after') IS NULL
+)
+ORDER BY DATE_TRUNC('day', created_at),
+    created_at ASC;
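
Note: each sqlc.narg() placeholder above becomes a nullable parameter in the generated Go code (gen/db/event_history.sql.go further down). A minimal usage sketch, assuming a *dbgen.Queries value is already wired up; the listStatusChanges helper itself is illustrative, not part of this commit:

package example // illustrative only

import (
    "context"
    "fmt"

    "github.com/jackc/pgx/v5/pgtype"

    dbgen "github.com/SamuelTariku/FortuneBet-Backend/gen/db"
)

// listStatusChanges prints the recorded status history of one event.
// Params left with Valid=false are sent as NULL, so their sqlc.narg()
// filter matches every row.
func listStatusChanges(ctx context.Context, q *dbgen.Queries, eventID string) error {
    rows, err := q.GetAllEventHistory(ctx, dbgen.GetAllEventHistoryParams{
        EventID: pgtype.Text{String: eventID, Valid: true},
        // CreatedBefore / CreatedAfter stay unset -> no time filtering.
    })
    if err != nil {
        return err
    }
    for _, r := range rows {
        fmt.Printf("%s: %s\n", r.CreatedAt.Time.Format("2006-01-02 15:04"), r.Status)
    }
    return nil
}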

@@ -229,10 +229,18 @@ UPDATE events
 SET score = $1,
     status = $2
 WHERE id = $3;
--- name: UpdateFeatured :exec
+-- name: UpdateEventFeatured :exec
 UPDATE events
 SET is_featured = $1
 WHERE id = $2;
+-- name: IsEventMonitored :one
+SELECT is_monitored
+FROM events
+WHERE id = $1;
+-- name: UpdateEventMonitored :exec
+UPDATE events
+SET is_monitored = $1
+WHERE id = $2;
 -- name: DeleteEvent :exec
 DELETE FROM events
 WHERE id = $1;
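
The new IsEventMonitored/UpdateEventMonitored queries pair a read with a write. A small sketch of a caller, assuming a *dbgen.Queries value; the setMonitoring helper is hypothetical, not part of the commit:

package example // illustrative only

import (
    "context"

    dbgen "github.com/SamuelTariku/FortuneBet-Backend/gen/db"
)

// setMonitoring flips the new is_monitored flag through the generated queries.
func setMonitoring(ctx context.Context, q *dbgen.Queries, eventID string, on bool) error {
    monitored, err := q.IsEventMonitored(ctx, eventID)
    if err != nil {
        return err
    }
    if monitored == on {
        return nil // already in the requested state
    }
    return q.UpdateEventMonitored(ctx, dbgen.UpdateEventMonitoredParams{
        IsMonitored: on,
        ID:          eventID,
    })
}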

db/query/odd_history.sql (new file) | 67

@@ -0,0 +1,67 @@
+-- name: InsertOddHistory :one
+INSERT INTO odd_history (
+    odd_id,
+    market_id,
+    raw_odd_id,
+    event_id,
+    odd_value
+)
+VALUES ($1, $2, $3, $4, $5)
+RETURNING *;
+-- name: GetAllOddHistory :many
+SELECT *
+FROM odd_history
+WHERE (
+    odd_id = sqlc.narg('odd_id')
+    OR sqlc.narg('odd_id') IS NULL
+)
+AND (
+    market_id = sqlc.narg('market_id')
+    OR sqlc.narg('market_id') IS NULL
+)
+AND (
+    raw_odd_id = sqlc.narg('raw_odd_id')
+    OR sqlc.narg('raw_odd_id') IS NULL
+)
+AND (
+    event_id = sqlc.narg('event_id')
+    OR sqlc.narg('event_id') IS NULL
+)
+AND (
+    created_at > sqlc.narg('created_before')
+    OR sqlc.narg('created_before') IS NULL
+)
+AND (
+    created_at < sqlc.narg('created_after')
+    OR sqlc.narg('created_after') IS NULL
+);
+
+-- name: GetInitialOddPerDay :many
+SELECT DISTINCT ON (DATE_TRUNC('day', created_at)) *
+FROM odd_history
+WHERE (
+    odd_id = sqlc.narg('odd_id')
+    OR sqlc.narg('odd_id') IS NULL
+)
+AND (
+    market_id = sqlc.narg('market_id')
+    OR sqlc.narg('market_id') IS NULL
+)
+AND (
+    raw_odd_id = sqlc.narg('raw_odd_id')
+    OR sqlc.narg('raw_odd_id') IS NULL
+)
+AND (
+    event_id = sqlc.narg('event_id')
+    OR sqlc.narg('event_id') IS NULL
+)
+AND (
+    created_at > sqlc.narg('created_before')
+    OR sqlc.narg('created_before') IS NULL
+)
+AND (
+    created_at < sqlc.narg('created_after')
+    OR sqlc.narg('created_after') IS NULL
+)
+ORDER BY DATE_TRUNC('day', created_at),
+    created_at ASC;
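
GetInitialOddPerDay relies on PostgreSQL's DISTINCT ON: combined with ORDER BY DATE_TRUNC('day', created_at), created_at ASC, it keeps only the earliest odd_history row of each day. An illustrative sketch of reading those daily opening values through the generated code (the printOpeningOdds helper is hypothetical):

package example // illustrative only

import (
    "context"
    "fmt"

    "github.com/jackc/pgx/v5/pgtype"

    dbgen "github.com/SamuelTariku/FortuneBet-Backend/gen/db"
)

// printOpeningOdds lists the first recorded odd_history row of each day for one odd.
func printOpeningOdds(ctx context.Context, q *dbgen.Queries, oddID int64) error {
    rows, err := q.GetInitialOddPerDay(ctx, dbgen.GetInitialOddPerDayParams{
        OddID: pgtype.Int8{Int64: oddID, Valid: true},
        // All other narg filters stay invalid (NULL) and are ignored.
    })
    if err != nil {
        return err
    }
    for _, r := range rows {
        fmt.Printf("%s opened at %.2f\n", r.CreatedAt.Time.Format("2006-01-02"), r.OddValue)
    }
    return nil
}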

@@ -46,49 +46,17 @@ SET odds_value = EXCLUDED.odds_value,
     source = EXCLUDED.source,
     fi = EXCLUDED.fi;
 -- name: GetPrematchOdds :many
-SELECT event_id,
-    fi,
-    market_type,
-    market_name,
-    market_category,
-    market_id,
-    name,
-    handicap,
-    odds_value,
-    section,
-    category,
-    raw_odds,
-    fetched_at,
-    source,
-    is_active
+SELECT *
 FROM odds
 WHERE is_active = true
     AND source = 'bet365';
 -- name: GetALLPrematchOdds :many
-SELECT event_id,
-    fi,
-    market_type,
-    market_name,
-    market_category,
-    market_id,
-    name,
-    handicap,
-    odds_value,
-    section,
-    category,
-    raw_odds,
-    fetched_at,
-    source,
-    is_active
+SELECT *
 FROM odds
 WHERE is_active = true
     AND source = 'bet365';
--- name: GetRawOddsByMarketID :one
-SELECT id,
-    market_name,
-    handicap,
-    raw_odds,
-    fetched_at
+-- name: GetOddsByMarketID :one
+SELECT *
 FROM odds
 WHERE market_id = $1
     AND fi = $2

gen/db/event_history.sql.go (new file) | 133

@@ -0,0 +1,133 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions:
+//   sqlc v1.29.0
+// source: event_history.sql
+
+package dbgen
+
+import (
+    "context"
+
+    "github.com/jackc/pgx/v5/pgtype"
+)
+
+const GetAllEventHistory = `-- name: GetAllEventHistory :many
+SELECT id, event_id, status, created_at
+FROM event_history
+WHERE (
+    event_id = $1
+    OR $1 IS NULL
+)
+AND (
+    created_at > $2
+    OR $2 IS NULL
+)
+AND (
+    created_at < $3
+    OR $3 IS NULL
+)
+`
+
+type GetAllEventHistoryParams struct {
+    EventID pgtype.Text `json:"event_id"`
+    CreatedBefore pgtype.Timestamp `json:"created_before"`
+    CreatedAfter pgtype.Timestamp `json:"created_after"`
+}
+
+func (q *Queries) GetAllEventHistory(ctx context.Context, arg GetAllEventHistoryParams) ([]EventHistory, error) {
+    rows, err := q.db.Query(ctx, GetAllEventHistory, arg.EventID, arg.CreatedBefore, arg.CreatedAfter)
+    if err != nil {
+        return nil, err
+    }
+    defer rows.Close()
+    var items []EventHistory
+    for rows.Next() {
+        var i EventHistory
+        if err := rows.Scan(
+            &i.ID,
+            &i.EventID,
+            &i.Status,
+            &i.CreatedAt,
+        ); err != nil {
+            return nil, err
+        }
+        items = append(items, i)
+    }
+    if err := rows.Err(); err != nil {
+        return nil, err
+    }
+    return items, nil
+}
+
+const GetInitialEventPerDay = `-- name: GetInitialEventPerDay :many
+SELECT DISTINCT ON (DATE_TRUNC('day', created_at)) id, event_id, status, created_at
+FROM event_history
+WHERE (
+    event_id = $1
+    OR $1 IS NULL
+)
+AND (
+    created_at > $2
+    OR $2 IS NULL
+)
+AND (
+    created_at < $3
+    OR $3 IS NULL
+)
+ORDER BY DATE_TRUNC('day', created_at),
+    created_at ASC
+`
+
+type GetInitialEventPerDayParams struct {
+    EventID pgtype.Text `json:"event_id"`
+    CreatedBefore pgtype.Timestamp `json:"created_before"`
+    CreatedAfter pgtype.Timestamp `json:"created_after"`
+}
+
+func (q *Queries) GetInitialEventPerDay(ctx context.Context, arg GetInitialEventPerDayParams) ([]EventHistory, error) {
+    rows, err := q.db.Query(ctx, GetInitialEventPerDay, arg.EventID, arg.CreatedBefore, arg.CreatedAfter)
+    if err != nil {
+        return nil, err
+    }
+    defer rows.Close()
+    var items []EventHistory
+    for rows.Next() {
+        var i EventHistory
+        if err := rows.Scan(
+            &i.ID,
+            &i.EventID,
+            &i.Status,
+            &i.CreatedAt,
+        ); err != nil {
+            return nil, err
+        }
+        items = append(items, i)
+    }
+    if err := rows.Err(); err != nil {
+        return nil, err
+    }
+    return items, nil
+}
+
+const InsertEventHistory = `-- name: InsertEventHistory :one
+INSERT INTO event_history (event_id, status)
+VALUES ($1, $2)
+RETURNING id, event_id, status, created_at
+`
+
+type InsertEventHistoryParams struct {
+    EventID string `json:"event_id"`
+    Status string `json:"status"`
+}
+
+func (q *Queries) InsertEventHistory(ctx context.Context, arg InsertEventHistoryParams) (EventHistory, error) {
+    row := q.db.QueryRow(ctx, InsertEventHistory, arg.EventID, arg.Status)
+    var i EventHistory
+    err := row.Scan(
+        &i.ID,
+        &i.EventID,
+        &i.Status,
+        &i.CreatedAt,
+    )
+    return i, err
+}

@@ -22,7 +22,7 @@ func (q *Queries) DeleteEvent(ctx context.Context, id string) error {
 }
 
 const GetAllUpcomingEvents = `-- name: GetAllUpcomingEvents :many
-SELECT id, sport_id, match_name, home_team, away_team, home_team_id, away_team_id, home_kit_image, away_kit_image, league_id, league_name, league_cc, start_time, score, match_minute, timer_status, added_time, match_period, is_live, status, fetched_at, source, is_featured, is_monitorred, is_active
+SELECT id, sport_id, match_name, home_team, away_team, home_team_id, away_team_id, home_kit_image, away_kit_image, league_id, league_name, league_cc, start_time, score, match_minute, timer_status, added_time, match_period, is_live, status, fetched_at, source, is_featured, is_monitored, is_active
 FROM events
 WHERE start_time > now()
     AND is_live = false

@@ -63,7 +63,7 @@ func (q *Queries) GetAllUpcomingEvents(ctx context.Context) ([]Event, error) {
             &i.FetchedAt,
             &i.Source,
             &i.IsFeatured,
-            &i.IsMonitorred,
+            &i.IsMonitored,
             &i.IsActive,
         ); err != nil {
             return nil, err

@@ -77,7 +77,7 @@ func (q *Queries) GetAllUpcomingEvents(ctx context.Context) ([]Event, error) {
 }
 
 const GetExpiredUpcomingEvents = `-- name: GetExpiredUpcomingEvents :many
-SELECT events.id, events.sport_id, events.match_name, events.home_team, events.away_team, events.home_team_id, events.away_team_id, events.home_kit_image, events.away_kit_image, events.league_id, events.league_name, events.league_cc, events.start_time, events.score, events.match_minute, events.timer_status, events.added_time, events.match_period, events.is_live, events.status, events.fetched_at, events.source, events.is_featured, events.is_monitorred, events.is_active,
+SELECT events.id, events.sport_id, events.match_name, events.home_team, events.away_team, events.home_team_id, events.away_team_id, events.home_kit_image, events.away_kit_image, events.league_id, events.league_name, events.league_cc, events.start_time, events.score, events.match_minute, events.timer_status, events.added_time, events.match_period, events.is_live, events.status, events.fetched_at, events.source, events.is_featured, events.is_monitored, events.is_active,
     leagues.country_code as league_cc
 FROM events
     LEFT JOIN leagues ON leagues.id = league_id

@@ -113,7 +113,7 @@ type GetExpiredUpcomingEventsRow struct {
     FetchedAt pgtype.Timestamp `json:"fetched_at"`
     Source pgtype.Text `json:"source"`
     IsFeatured bool `json:"is_featured"`
-    IsMonitorred bool `json:"is_monitorred"`
+    IsMonitored bool `json:"is_monitored"`
     IsActive bool `json:"is_active"`
     LeagueCc_2 pgtype.Text `json:"league_cc_2"`
 }

@@ -151,7 +151,7 @@ func (q *Queries) GetExpiredUpcomingEvents(ctx context.Context, status pgtype.Te
             &i.FetchedAt,
             &i.Source,
             &i.IsFeatured,
-            &i.IsMonitorred,
+            &i.IsMonitored,
             &i.IsActive,
             &i.LeagueCc_2,
         ); err != nil {

@@ -166,7 +166,7 @@ func (q *Queries) GetExpiredUpcomingEvents(ctx context.Context, status pgtype.Te
 }
 
 const GetPaginatedUpcomingEvents = `-- name: GetPaginatedUpcomingEvents :many
-SELECT events.id, events.sport_id, events.match_name, events.home_team, events.away_team, events.home_team_id, events.away_team_id, events.home_kit_image, events.away_kit_image, events.league_id, events.league_name, events.league_cc, events.start_time, events.score, events.match_minute, events.timer_status, events.added_time, events.match_period, events.is_live, events.status, events.fetched_at, events.source, events.is_featured, events.is_monitorred, events.is_active,
+SELECT events.id, events.sport_id, events.match_name, events.home_team, events.away_team, events.home_team_id, events.away_team_id, events.home_kit_image, events.away_kit_image, events.league_id, events.league_name, events.league_cc, events.start_time, events.score, events.match_minute, events.timer_status, events.added_time, events.match_period, events.is_live, events.status, events.fetched_at, events.source, events.is_featured, events.is_monitored, events.is_active,
     leagues.country_code as league_cc
 FROM events
     LEFT JOIN leagues ON leagues.id = league_id

@@ -242,7 +242,7 @@ type GetPaginatedUpcomingEventsRow struct {
     FetchedAt pgtype.Timestamp `json:"fetched_at"`
     Source pgtype.Text `json:"source"`
     IsFeatured bool `json:"is_featured"`
-    IsMonitorred bool `json:"is_monitorred"`
+    IsMonitored bool `json:"is_monitored"`
     IsActive bool `json:"is_active"`
     LeagueCc_2 pgtype.Text `json:"league_cc_2"`
 }

@@ -290,7 +290,7 @@ func (q *Queries) GetPaginatedUpcomingEvents(ctx context.Context, arg GetPaginat
             &i.FetchedAt,
             &i.Source,
             &i.IsFeatured,
-            &i.IsMonitorred,
+            &i.IsMonitored,
             &i.IsActive,
             &i.LeagueCc_2,
         ); err != nil {

@@ -367,7 +367,7 @@ func (q *Queries) GetTotalEvents(ctx context.Context, arg GetTotalEventsParams)
 }
 
 const GetUpcomingByID = `-- name: GetUpcomingByID :one
-SELECT id, sport_id, match_name, home_team, away_team, home_team_id, away_team_id, home_kit_image, away_kit_image, league_id, league_name, league_cc, start_time, score, match_minute, timer_status, added_time, match_period, is_live, status, fetched_at, source, is_featured, is_monitorred, is_active
+SELECT id, sport_id, match_name, home_team, away_team, home_team_id, away_team_id, home_kit_image, away_kit_image, league_id, league_name, league_cc, start_time, score, match_minute, timer_status, added_time, match_period, is_live, status, fetched_at, source, is_featured, is_monitored, is_active
 FROM events
 WHERE id = $1
     AND is_live = false

@@ -402,7 +402,7 @@ func (q *Queries) GetUpcomingByID(ctx context.Context, id string) (Event, error)
         &i.FetchedAt,
         &i.Source,
         &i.IsFeatured,
-        &i.IsMonitorred,
+        &i.IsMonitored,
         &i.IsActive,
     )
     return i, err

@@ -623,6 +623,19 @@ func (q *Queries) InsertUpcomingEvent(ctx context.Context, arg InsertUpcomingEve
     return err
 }
 
+const IsEventMonitored = `-- name: IsEventMonitored :one
+SELECT is_monitored
+FROM events
+WHERE id = $1
+`
+
+func (q *Queries) IsEventMonitored(ctx context.Context, id string) (bool, error) {
+    row := q.db.QueryRow(ctx, IsEventMonitored, id)
+    var is_monitored bool
+    err := row.Scan(&is_monitored)
+    return is_monitored, err
+}
+
 const ListLiveEvents = `-- name: ListLiveEvents :many
 SELECT id
 FROM events

@@ -649,19 +662,35 @@ func (q *Queries) ListLiveEvents(ctx context.Context) ([]string, error) {
     return items, nil
 }
 
-const UpdateFeatured = `-- name: UpdateFeatured :exec
+const UpdateEventFeatured = `-- name: UpdateEventFeatured :exec
 UPDATE events
 SET is_featured = $1
 WHERE id = $2
 `
 
-type UpdateFeaturedParams struct {
+type UpdateEventFeaturedParams struct {
     IsFeatured bool `json:"is_featured"`
     ID string `json:"id"`
 }
 
-func (q *Queries) UpdateFeatured(ctx context.Context, arg UpdateFeaturedParams) error {
-    _, err := q.db.Exec(ctx, UpdateFeatured, arg.IsFeatured, arg.ID)
+func (q *Queries) UpdateEventFeatured(ctx context.Context, arg UpdateEventFeaturedParams) error {
+    _, err := q.db.Exec(ctx, UpdateEventFeatured, arg.IsFeatured, arg.ID)
+    return err
+}
+
+const UpdateEventMonitored = `-- name: UpdateEventMonitored :exec
+UPDATE events
+SET is_monitored = $1
+WHERE id = $2
+`
+
+type UpdateEventMonitoredParams struct {
+    IsMonitored bool `json:"is_monitored"`
+    ID string `json:"id"`
+}
+
+func (q *Queries) UpdateEventMonitored(ctx context.Context, arg UpdateEventMonitoredParams) error {
+    _, err := q.db.Exec(ctx, UpdateEventMonitored, arg.IsMonitored, arg.ID)
     return err
 }

@@ -258,10 +258,17 @@ type Event struct {
     FetchedAt pgtype.Timestamp `json:"fetched_at"`
     Source pgtype.Text `json:"source"`
     IsFeatured bool `json:"is_featured"`
-    IsMonitorred bool `json:"is_monitorred"`
+    IsMonitored bool `json:"is_monitored"`
     IsActive bool `json:"is_active"`
 }
 
+type EventHistory struct {
+    ID int64 `json:"id"`
+    EventID string `json:"event_id"`
+    Status string `json:"status"`
+    CreatedAt pgtype.Timestamp `json:"created_at"`
+}
+
 type ExchangeRate struct {
     ID int32 `json:"id"`
     FromCurrency string `json:"from_currency"`

@@ -316,7 +323,7 @@ type Notification struct {
 }
 
 type Odd struct {
-    ID int32 `json:"id"`
+    ID int64 `json:"id"`
     EventID pgtype.Text `json:"event_id"`
     Fi pgtype.Text `json:"fi"`
     MarketType string `json:"market_type"`

@@ -334,6 +341,16 @@ type Odd struct {
     IsActive pgtype.Bool `json:"is_active"`
 }
 
+type OddHistory struct {
+    ID int64 `json:"id"`
+    OddID int64 `json:"odd_id"`
+    RawOddID int64 `json:"raw_odd_id"`
+    MarketID string `json:"market_id"`
+    EventID string `json:"event_id"`
+    OddValue float64 `json:"odd_value"`
+    CreatedAt pgtype.Timestamp `json:"created_at"`
+}
+
 type Otp struct {
     ID int64 `json:"id"`
     SentTo string `json:"sent_to"`

gen/db/odd_history.sql.go (new file) | 201

@@ -0,0 +1,201 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions:
+//   sqlc v1.29.0
+// source: odd_history.sql
+
+package dbgen
+
+import (
+    "context"
+
+    "github.com/jackc/pgx/v5/pgtype"
+)
+
+const GetAllOddHistory = `-- name: GetAllOddHistory :many
+SELECT id, odd_id, raw_odd_id, market_id, event_id, odd_value, created_at
+FROM odd_history
+WHERE (
+    odd_id = $1
+    OR $1 IS NULL
+)
+AND (
+    market_id = $2
+    OR $2 IS NULL
+)
+AND (
+    raw_odd_id = $3
+    OR $3 IS NULL
+)
+AND (
+    event_id = $4
+    OR $4 IS NULL
+)
+AND (
+    created_at > $5
+    OR $5 IS NULL
+)
+AND (
+    created_at < $6
+    OR $6 IS NULL
+)
+`
+
+type GetAllOddHistoryParams struct {
+    OddID pgtype.Int8 `json:"odd_id"`
+    MarketID pgtype.Text `json:"market_id"`
+    RawOddID pgtype.Int8 `json:"raw_odd_id"`
+    EventID pgtype.Text `json:"event_id"`
+    CreatedBefore pgtype.Timestamp `json:"created_before"`
+    CreatedAfter pgtype.Timestamp `json:"created_after"`
+}
+
+func (q *Queries) GetAllOddHistory(ctx context.Context, arg GetAllOddHistoryParams) ([]OddHistory, error) {
+    rows, err := q.db.Query(ctx, GetAllOddHistory,
+        arg.OddID,
+        arg.MarketID,
+        arg.RawOddID,
+        arg.EventID,
+        arg.CreatedBefore,
+        arg.CreatedAfter,
+    )
+    if err != nil {
+        return nil, err
+    }
+    defer rows.Close()
+    var items []OddHistory
+    for rows.Next() {
+        var i OddHistory
+        if err := rows.Scan(
+            &i.ID,
+            &i.OddID,
+            &i.RawOddID,
+            &i.MarketID,
+            &i.EventID,
+            &i.OddValue,
+            &i.CreatedAt,
+        ); err != nil {
+            return nil, err
+        }
+        items = append(items, i)
+    }
+    if err := rows.Err(); err != nil {
+        return nil, err
+    }
+    return items, nil
+}
+
+const GetInitialOddPerDay = `-- name: GetInitialOddPerDay :many
+SELECT DISTINCT ON (DATE_TRUNC('day', created_at)) id, odd_id, raw_odd_id, market_id, event_id, odd_value, created_at
+FROM odd_history
+WHERE (
+    odd_id = $1
+    OR $1 IS NULL
+)
+AND (
+    market_id = $2
+    OR $2 IS NULL
+)
+AND (
+    raw_odd_id = $3
+    OR $3 IS NULL
+)
+AND (
+    event_id = $4
+    OR $4 IS NULL
+)
+AND (
+    created_at > $5
+    OR $5 IS NULL
+)
+AND (
+    created_at < $6
+    OR $6 IS NULL
+)
+ORDER BY DATE_TRUNC('day', created_at),
+    created_at ASC
+`
+
+type GetInitialOddPerDayParams struct {
+    OddID pgtype.Int8 `json:"odd_id"`
+    MarketID pgtype.Text `json:"market_id"`
+    RawOddID pgtype.Int8 `json:"raw_odd_id"`
+    EventID pgtype.Text `json:"event_id"`
+    CreatedBefore pgtype.Timestamp `json:"created_before"`
+    CreatedAfter pgtype.Timestamp `json:"created_after"`
+}
+
+func (q *Queries) GetInitialOddPerDay(ctx context.Context, arg GetInitialOddPerDayParams) ([]OddHistory, error) {
+    rows, err := q.db.Query(ctx, GetInitialOddPerDay,
+        arg.OddID,
+        arg.MarketID,
+        arg.RawOddID,
+        arg.EventID,
+        arg.CreatedBefore,
+        arg.CreatedAfter,
+    )
+    if err != nil {
+        return nil, err
+    }
+    defer rows.Close()
+    var items []OddHistory
+    for rows.Next() {
+        var i OddHistory
+        if err := rows.Scan(
+            &i.ID,
+            &i.OddID,
+            &i.RawOddID,
+            &i.MarketID,
+            &i.EventID,
+            &i.OddValue,
+            &i.CreatedAt,
+        ); err != nil {
+            return nil, err
+        }
+        items = append(items, i)
+    }
+    if err := rows.Err(); err != nil {
+        return nil, err
+    }
+    return items, nil
+}
+
+const InsertOddHistory = `-- name: InsertOddHistory :one
+INSERT INTO odd_history (
+    odd_id,
+    market_id,
+    raw_odd_id,
+    event_id,
+    odd_value
+)
+VALUES ($1, $2, $3, $4, $5)
+RETURNING id, odd_id, raw_odd_id, market_id, event_id, odd_value, created_at
+`
+
+type InsertOddHistoryParams struct {
+    OddID int64 `json:"odd_id"`
+    MarketID string `json:"market_id"`
+    RawOddID int64 `json:"raw_odd_id"`
+    EventID string `json:"event_id"`
+    OddValue float64 `json:"odd_value"`
+}
+
+func (q *Queries) InsertOddHistory(ctx context.Context, arg InsertOddHistoryParams) (OddHistory, error) {
+    row := q.db.QueryRow(ctx, InsertOddHistory,
+        arg.OddID,
+        arg.MarketID,
+        arg.RawOddID,
+        arg.EventID,
+        arg.OddValue,
+    )
+    var i OddHistory
+    err := row.Scan(
+        &i.ID,
+        &i.OddID,
+        &i.RawOddID,
+        &i.MarketID,
+        &i.EventID,
+        &i.OddValue,
+        &i.CreatedAt,
+    )
+    return i, err
+}

@@ -22,54 +22,23 @@ func (q *Queries) DeleteOddsForEvent(ctx context.Context, fi pgtype.Text) error
 }
 
 const GetALLPrematchOdds = `-- name: GetALLPrematchOdds :many
-SELECT event_id,
-    fi,
-    market_type,
-    market_name,
-    market_category,
-    market_id,
-    name,
-    handicap,
-    odds_value,
-    section,
-    category,
-    raw_odds,
-    fetched_at,
-    source,
-    is_active
+SELECT id, event_id, fi, market_type, market_name, market_category, market_id, name, handicap, odds_value, section, category, raw_odds, fetched_at, source, is_active
 FROM odds
 WHERE is_active = true
     AND source = 'bet365'
 `
 
-type GetALLPrematchOddsRow struct {
-    EventID pgtype.Text `json:"event_id"`
-    Fi pgtype.Text `json:"fi"`
-    MarketType string `json:"market_type"`
-    MarketName pgtype.Text `json:"market_name"`
-    MarketCategory pgtype.Text `json:"market_category"`
-    MarketID pgtype.Text `json:"market_id"`
-    Name pgtype.Text `json:"name"`
-    Handicap pgtype.Text `json:"handicap"`
-    OddsValue pgtype.Float8 `json:"odds_value"`
-    Section string `json:"section"`
-    Category pgtype.Text `json:"category"`
-    RawOdds []byte `json:"raw_odds"`
-    FetchedAt pgtype.Timestamp `json:"fetched_at"`
-    Source pgtype.Text `json:"source"`
-    IsActive pgtype.Bool `json:"is_active"`
-}
-
-func (q *Queries) GetALLPrematchOdds(ctx context.Context) ([]GetALLPrematchOddsRow, error) {
+func (q *Queries) GetALLPrematchOdds(ctx context.Context) ([]Odd, error) {
     rows, err := q.db.Query(ctx, GetALLPrematchOdds)
     if err != nil {
         return nil, err
     }
     defer rows.Close()
-    var items []GetALLPrematchOddsRow
+    var items []Odd
     for rows.Next() {
-        var i GetALLPrematchOddsRow
+        var i Odd
         if err := rows.Scan(
+            &i.ID,
             &i.EventID,
             &i.Fi,
             &i.MarketType,

@@ -96,6 +65,44 @@ func (q *Queries) GetALLPrematchOdds(ctx context.Context) ([]GetALLPrematchOddsR
     return items, nil
 }
 
+const GetOddsByMarketID = `-- name: GetOddsByMarketID :one
+SELECT id, event_id, fi, market_type, market_name, market_category, market_id, name, handicap, odds_value, section, category, raw_odds, fetched_at, source, is_active
+FROM odds
+WHERE market_id = $1
+    AND fi = $2
+    AND is_active = true
+    AND source = 'bet365'
+`
+
+type GetOddsByMarketIDParams struct {
+    MarketID pgtype.Text `json:"market_id"`
+    Fi pgtype.Text `json:"fi"`
+}
+
+func (q *Queries) GetOddsByMarketID(ctx context.Context, arg GetOddsByMarketIDParams) (Odd, error) {
+    row := q.db.QueryRow(ctx, GetOddsByMarketID, arg.MarketID, arg.Fi)
+    var i Odd
+    err := row.Scan(
+        &i.ID,
+        &i.EventID,
+        &i.Fi,
+        &i.MarketType,
+        &i.MarketName,
+        &i.MarketCategory,
+        &i.MarketID,
+        &i.Name,
+        &i.Handicap,
+        &i.OddsValue,
+        &i.Section,
+        &i.Category,
+        &i.RawOdds,
+        &i.FetchedAt,
+        &i.Source,
+        &i.IsActive,
+    )
+    return i, err
+}
+
 const GetPaginatedPrematchOddsByUpcomingID = `-- name: GetPaginatedPrematchOddsByUpcomingID :many
 SELECT o.id, o.event_id, o.fi, o.market_type, o.market_name, o.market_category, o.market_id, o.name, o.handicap, o.odds_value, o.section, o.category, o.raw_odds, o.fetched_at, o.source, o.is_active
 FROM odds o

@@ -152,54 +159,23 @@ func (q *Queries) GetPaginatedPrematchOddsByUpcomingID(ctx context.Context, arg
 }
 
 const GetPrematchOdds = `-- name: GetPrematchOdds :many
-SELECT event_id,
-    fi,
-    market_type,
-    market_name,
-    market_category,
-    market_id,
-    name,
-    handicap,
-    odds_value,
-    section,
-    category,
-    raw_odds,
-    fetched_at,
-    source,
-    is_active
+SELECT id, event_id, fi, market_type, market_name, market_category, market_id, name, handicap, odds_value, section, category, raw_odds, fetched_at, source, is_active
 FROM odds
 WHERE is_active = true
     AND source = 'bet365'
 `
 
-type GetPrematchOddsRow struct {
-    EventID pgtype.Text `json:"event_id"`
-    Fi pgtype.Text `json:"fi"`
-    MarketType string `json:"market_type"`
-    MarketName pgtype.Text `json:"market_name"`
-    MarketCategory pgtype.Text `json:"market_category"`
-    MarketID pgtype.Text `json:"market_id"`
-    Name pgtype.Text `json:"name"`
-    Handicap pgtype.Text `json:"handicap"`
-    OddsValue pgtype.Float8 `json:"odds_value"`
-    Section string `json:"section"`
-    Category pgtype.Text `json:"category"`
-    RawOdds []byte `json:"raw_odds"`
-    FetchedAt pgtype.Timestamp `json:"fetched_at"`
-    Source pgtype.Text `json:"source"`
-    IsActive pgtype.Bool `json:"is_active"`
-}
-
-func (q *Queries) GetPrematchOdds(ctx context.Context) ([]GetPrematchOddsRow, error) {
+func (q *Queries) GetPrematchOdds(ctx context.Context) ([]Odd, error) {
     rows, err := q.db.Query(ctx, GetPrematchOdds)
     if err != nil {
         return nil, err
     }
     defer rows.Close()
-    var items []GetPrematchOddsRow
+    var items []Odd
     for rows.Next() {
-        var i GetPrematchOddsRow
+        var i Odd
         if err := rows.Scan(
+            &i.ID,
             &i.EventID,
             &i.Fi,
             &i.MarketType,

@@ -274,45 +250,6 @@ func (q *Queries) GetPrematchOddsByUpcomingID(ctx context.Context, id string) ([
     return items, nil
 }
 
-const GetRawOddsByMarketID = `-- name: GetRawOddsByMarketID :one
-SELECT id,
-    market_name,
-    handicap,
-    raw_odds,
-    fetched_at
-FROM odds
-WHERE market_id = $1
-    AND fi = $2
-    AND is_active = true
-    AND source = 'bet365'
-`
-
-type GetRawOddsByMarketIDParams struct {
-    MarketID pgtype.Text `json:"market_id"`
-    Fi pgtype.Text `json:"fi"`
-}
-
-type GetRawOddsByMarketIDRow struct {
-    ID int32 `json:"id"`
-    MarketName pgtype.Text `json:"market_name"`
-    Handicap pgtype.Text `json:"handicap"`
-    RawOdds []byte `json:"raw_odds"`
-    FetchedAt pgtype.Timestamp `json:"fetched_at"`
-}
-
-func (q *Queries) GetRawOddsByMarketID(ctx context.Context, arg GetRawOddsByMarketIDParams) (GetRawOddsByMarketIDRow, error) {
-    row := q.db.QueryRow(ctx, GetRawOddsByMarketID, arg.MarketID, arg.Fi)
-    var i GetRawOddsByMarketIDRow
-    err := row.Scan(
-        &i.ID,
-        &i.MarketName,
-        &i.Handicap,
-        &i.RawOdds,
-        &i.FetchedAt,
-    )
-    return i, err
-}
-
 const InsertNonLiveOdd = `-- name: InsertNonLiveOdd :exec
 INSERT INTO odds (
     event_id,

@@ -6,6 +6,7 @@ import (
     "strconv"
     "time"
 
+    "github.com/jackc/pgx/v5/pgtype"
     "go.uber.org/zap"
 )
 

@@ -42,6 +43,62 @@ type ValidBool struct {
     Valid bool
 }
 
+// ValidInt64 → pgtype.Int8
+func (v ValidInt64) ToPG() pgtype.Int8 {
+    return pgtype.Int8{
+        Int64: v.Value,
+        Valid: v.Valid,
+    }
+}
+
+// ValidInt32 → pgtype.Int4
+func (v ValidInt32) ToPG() pgtype.Int4 {
+    return pgtype.Int4{
+        Int32: v.Value,
+        Valid: v.Valid,
+    }
+}
+
+// ValidInt → pgtype.Int4 (Go int mapped to int32 for pg compatibility)
+func (v ValidInt) ToPG() pgtype.Int4 {
+    return pgtype.Int4{
+        Int32: int32(v.Value),
+        Valid: v.Valid,
+    }
+}
+
+// ValidFloat32 → pgtype.Float4
+func (v ValidFloat32) ToPG() pgtype.Float4 {
+    return pgtype.Float4{
+        Float32: v.Value,
+        Valid: v.Valid,
+    }
+}
+
+// ValidString → pgtype.Text
+func (v ValidString) ToPG() pgtype.Text {
+    return pgtype.Text{
+        String: v.Value,
+        Valid: v.Valid,
+    }
+}
+
+// ValidTime → pgtype.Timestamp
+func (v ValidTime) ToPG() pgtype.Timestamp {
+    return pgtype.Timestamp{
+        Time: v.Value,
+        Valid: v.Valid,
+    }
+}
+
+// ValidBool → pgtype.Bool
+func (v ValidBool) ToPG() pgtype.Bool {
+    return pgtype.Bool{
+        Bool: v.Value,
+        Valid: v.Valid,
+    }
+}
+
 type Currency int64
 
 // ToCurrency converts a float32 to Currency

@@ -163,5 +220,3 @@ func (n *NullableInt64JSON) UnmarshalJSON(data []byte) error {
 
     return fmt.Errorf("invalid int64 value: %s", string(data))
 }
-
-
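
The new ToPG helpers bridge the domain's Valid* wrappers to pgx's pgtype values: an unset wrapper becomes a NULL parameter, which the sqlc.narg() filters above treat as "no filter". A tiny illustrative example (the Value field names are taken from the ToPG bodies above; the program itself is not part of the commit):

package main // illustrative only

import (
    "fmt"

    "github.com/SamuelTariku/FortuneBet-Backend/internal/domain"
)

func main() {
    // Zero-value wrapper -> pgtype.Int8{Valid: false} -> SQL NULL.
    var unset domain.ValidInt64
    set := domain.ValidInt64{Value: 42, Valid: true}

    fmt.Println(unset.ToPG().Valid) // false: the generated query ignores this filter
    fmt.Println(set.ToPG().Int64)   // 42
}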

@@ -102,7 +102,8 @@ type UpcomingEvent struct {
     Source string `json:"source"` // bet api provider (bet365, betfair)
     Status EventStatus `json:"status"` //Match Status for event
     IsFeatured bool `json:"is_featured"` //Whether the event is featured or not
-    IsActive bool `json:"is_active"` //Whether the event is featured or not
+    IsMonitored bool `json:"is_monitored"` //Whether the event is monitored or not
+    IsActive bool `json:"is_active"` //Whether the event is active or not
 }
 type MatchResult struct {
     EventID string

@@ -112,14 +113,6 @@ type MatchResult struct {
     Scores map[string]map[string]string
 }
 
-type Odds struct {
-    ID int64 `json:"id"`
-    EventID string `json:"event_id"`
-    MarketType string `json:"market_type"`
-    Name string `json:"name"`
-    HitStatus string `json:"hit_status"`
-}
-
 type EventFilter struct {
     Query ValidString
     SportID ValidInt32

internal/domain/event_history.go (new file) | 41

@@ -0,0 +1,41 @@
+package domain
+
+import (
+    "time"
+
+    dbgen "github.com/SamuelTariku/FortuneBet-Backend/gen/db"
+)
+
+type EventHistory struct {
+    ID int64
+    EventID string
+    Status string
+    CreatedAt time.Time
+}
+
+type CreateEventHistory struct {
+    EventID string
+    Status string
+}
+
+type EventHistoryFilter struct {
+    EventID ValidString
+    CreatedBefore ValidTime
+    CreatedAfter ValidTime
+}
+
+func ConvertCreateEventHistory(eventHistory CreateEventHistory) dbgen.InsertEventHistoryParams {
+    return dbgen.InsertEventHistoryParams{
+        EventID: eventHistory.EventID,
+        Status: eventHistory.Status,
+    }
+}
+
+func ConvertDBEventHistory(eventHistory dbgen.EventHistory) EventHistory {
+    return EventHistory{
+        ID: eventHistory.ID,
+        EventID: eventHistory.EventID,
+        Status: eventHistory.Status,
+        CreatedAt: eventHistory.CreatedAt.Time,
+    }
+}

@@ -1,11 +1,10 @@
 package domain
 
 import (
+    "encoding/json"
     "time"
 )
 
-type RawMessage interface{}
-
 type Market struct {
     EventID string
     FI string

@@ -22,6 +21,7 @@ type Market struct {
 }
 
 type Odd struct {
+    ID int64 `json:"id"`
     EventID string `json:"event_id"`
     Fi string `json:"fi"`
     MarketType string `json:"market_type"`

@@ -33,7 +33,7 @@ type Odd struct {
     OddsValue float64 `json:"odds_value"`
     Section string `json:"section"`
     Category string `json:"category"`
-    RawOdds []RawMessage `json:"raw_odds"`
+    RawOdds []json.RawMessage `json:"raw_odds"`
     FetchedAt time.Time `json:"fetched_at"`
     Source string `json:"source"`
     IsActive bool `json:"is_active"`

@@ -42,8 +42,6 @@ type RawOddsByMarketID struct {
     ID int64 `json:"id"`
     MarketName string `json:"market_name"`
     Handicap string `json:"handicap"`
-    RawOdds []RawMessage `json:"raw_odds"`
+    RawOdds []json.RawMessage `json:"raw_odds"`
     FetchedAt time.Time `json:"fetched_at"`
 }
-
-

internal/domain/odds_history.go (new file) | 56

@@ -0,0 +1,56 @@
+package domain
+
+import (
+    "time"
+
+    dbgen "github.com/SamuelTariku/FortuneBet-Backend/gen/db"
+)
+
+type OddHistory struct {
+    ID int64
+    OddID int64
+    MarketID string
+    RawOddID int64
+    EventID string
+    OddValue float64
+    CreatedAt time.Time
+}
+
+type CreateOddHistory struct {
+    OddID int64
+    MarketID string
+    RawOddID int64
+    EventID string
+    OddValue float64
+}
+
+type OddHistoryFilter struct {
+    OddID ValidInt64
+    MarketID ValidString
+    RawOddID ValidInt64
+    EventID ValidString
+    CreatedBefore ValidTime
+    CreatedAfter ValidTime
+}
+
+func ConvertCreateOddHistory(odd CreateOddHistory) dbgen.InsertOddHistoryParams {
+    return dbgen.InsertOddHistoryParams{
+        OddID: odd.OddID,
+        MarketID: odd.MarketID,
+        RawOddID: odd.RawOddID,
+        EventID: odd.EventID,
+        OddValue: odd.OddValue,
+    }
+}
+
+func ConvertDBOddHistory(dbOddHistory dbgen.OddHistory) OddHistory {
+    return OddHistory{
+        ID: dbOddHistory.ID,
+        OddID: dbOddHistory.OddID,
+        MarketID: dbOddHistory.MarketID,
+        RawOddID: dbOddHistory.RawOddID,
+        EventID: dbOddHistory.EventID,
+        OddValue: dbOddHistory.OddValue,
+        CreatedAt: dbOddHistory.CreatedAt.Time,
+    }
+}

@@ -9,7 +9,6 @@ import (
     "github.com/jackc/pgx/v5/pgtype"
 )
 
-
 func (s *Store) CreateBranch(ctx context.Context, branch domain.CreateBranch) (domain.Branch, error) {
 
     dbBranch, err := s.queries.CreateBranch(ctx, domain.ConvertCreateBranch(branch))

@@ -53,26 +52,11 @@ func (s *Store) GetBranchByCompanyID(ctx context.Context, companyID int64) ([]do
 
 func (s *Store) GetAllBranches(ctx context.Context, filter domain.BranchFilter) ([]domain.BranchDetail, error) {
     dbBranches, err := s.queries.GetAllBranches(ctx, dbgen.GetAllBranchesParams{
-        CompanyID: pgtype.Int8{
-            Int64: filter.CompanyID.Value,
-            Valid: filter.CompanyID.Valid,
-        },
-        BranchManagerID: pgtype.Int8{
-            Int64: filter.BranchManagerID.Value,
-            Valid: filter.BranchManagerID.Valid,
-        },
-        Query: pgtype.Text{
-            String: filter.Query.Value,
-            Valid: filter.Query.Valid,
-        },
-        CreatedBefore: pgtype.Timestamp{
-            Time: filter.CreatedBefore.Value,
-            Valid: filter.CreatedBefore.Valid,
-        },
-        CreatedAfter: pgtype.Timestamp{
-            Time: filter.CreatedAfter.Value,
-            Valid: filter.CreatedAfter.Valid,
-        },
+        CompanyID: filter.CompanyID.ToPG(),
+        BranchManagerID: filter.BranchManagerID.ToPG(),
+        Query: filter.Query.ToPG(),
+        CreatedBefore: filter.CreatedBefore.ToPG(),
+        CreatedAfter: filter.CreatedAfter.ToPG(),
     })
     if err != nil {
         return nil, err

@@ -90,6 +90,8 @@ func (s *Store) GetAllUpcomingEvents(ctx context.Context) ([]domain.UpcomingEven
             Source: e.Source.String,
             Status: domain.EventStatus(e.Status.String),
             IsFeatured: e.IsFeatured,
+            IsMonitored: e.IsMonitored,
+            IsActive: e.IsActive,
         }
     }
     return upcomingEvents, nil

@@ -193,6 +195,7 @@ func (s *Store) GetPaginatedUpcomingEvents(ctx context.Context, filter domain.Ev
             Status: domain.EventStatus(e.Status.String),
             IsFeatured: e.IsFeatured,
             IsActive: e.IsActive,
+            IsMonitored: e.IsMonitored,
         }
     }
     totalCount, err := s.queries.GetTotalEvents(ctx, dbgen.GetTotalEventsParams{

@@ -255,6 +258,8 @@ func (s *Store) GetUpcomingEventByID(ctx context.Context, ID string) (domain.Upc
         Source: event.Source.String,
         Status: domain.EventStatus(event.Status.String),
         IsFeatured: event.IsFeatured,
+        IsActive: event.IsActive,
+        IsMonitored: event.IsMonitored,
     }, nil
 }
 func (s *Store) UpdateFinalScore(ctx context.Context, eventID, fullScore string, status domain.EventStatus) error {

@@ -290,13 +295,28 @@ func (s *Store) UpdateEventStatus(ctx context.Context, eventID string, status do
 
 }
 
-func (s *Store) UpdateFeatured(ctx context.Context, eventID string, isFeatured bool) error {
-    return s.queries.UpdateFeatured(ctx, dbgen.UpdateFeaturedParams{
+func (s *Store) UpdateEventFeatured(ctx context.Context, eventID string, isFeatured bool) error {
+    return s.queries.UpdateEventFeatured(ctx, dbgen.UpdateEventFeaturedParams{
         ID: eventID,
         IsFeatured: isFeatured,
     })
 }
+
+func (s *Store) IsEventMonitored(ctx context.Context, eventID string) (bool, error) {
+    isMonitored, err := s.queries.IsEventMonitored(ctx, eventID)
+
+    if err != nil {
+        return false, err
+    }
+    return isMonitored, err
+}
+func (s *Store) UpdateEventMonitored(ctx context.Context, eventID string, IsMonitored bool) error {
+    return s.queries.UpdateEventMonitored(ctx, dbgen.UpdateEventMonitoredParams{
+        ID: eventID,
+        IsMonitored: IsMonitored,
+    })
+}
 
 func (s *Store) DeleteEvent(ctx context.Context, eventID string) error {
     err := s.queries.DeleteEvent(ctx, eventID)
     if err != nil {
55
internal/repository/event_history.go
Normal file
55
internal/repository/event_history.go
Normal file
|
|
@ -0,0 +1,55 @@
package repository

import (
    "context"
    "fmt"

    dbgen "github.com/SamuelTariku/FortuneBet-Backend/gen/db"
    "github.com/SamuelTariku/FortuneBet-Backend/internal/domain"
)

func (s *Store) InsertEventHistory(ctx context.Context, eventHistory domain.CreateEventHistory) (domain.EventHistory, error) {
    dbEventHistory, err := s.queries.InsertEventHistory(ctx, domain.ConvertCreateEventHistory(eventHistory))

    if err != nil {
        return domain.EventHistory{}, fmt.Errorf("InsertEventHistory failed: %w", err)
    }

    return domain.ConvertDBEventHistory(dbEventHistory), nil
}

func convertEventHistory(list []dbgen.EventHistory) []domain.EventHistory {
    result := make([]domain.EventHistory, 0, len(list))
    for _, item := range list {
        result = append(result, domain.ConvertDBEventHistory(item))
    }
    return result
}

func (s *Store) GetAllEventHistory(ctx context.Context, filter domain.EventHistoryFilter) ([]domain.EventHistory, error) {
    dbEventHistories, err := s.queries.GetAllEventHistory(ctx, dbgen.GetAllEventHistoryParams{
        EventID:       filter.EventID.ToPG(),
        CreatedAfter:  filter.CreatedAfter.ToPG(),
        CreatedBefore: filter.CreatedBefore.ToPG(),
    })

    if err != nil {
        return nil, fmt.Errorf("GetAllEventHistory failed: %w", err)
    }

    return convertEventHistory(dbEventHistories), nil
}

func (s *Store) GetInitialEventPerDay(ctx context.Context, filter domain.EventHistoryFilter) ([]domain.EventHistory, error) {
    dbEventHistories, err := s.queries.GetInitialEventPerDay(ctx, dbgen.GetInitialEventPerDayParams{
        EventID:       filter.EventID.ToPG(),
        CreatedAfter:  filter.CreatedAfter.ToPG(),
        CreatedBefore: filter.CreatedBefore.ToPG(),
    })

    if err != nil {
        return nil, fmt.Errorf("GetInitialEventPerDay failed: %w", err)
    }

    return convertEventHistory(dbEventHistories), nil
}
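Note: the SQL behind GetInitialEventPerDay is sqlc-generated and not part of this hunk. As a rough sketch only, a "first status row per event per calendar day" query over the columns implied by the parameters above (event_id, status, created_at) could use PostgreSQL's DISTINCT ON; the table and column names here are inferred, not copied from the commit's query file.

package main

import "fmt"

// getInitialEventPerDaySQL is a hypothetical sketch, NOT the commit's generated query:
// it picks the first recorded status per event per calendar day, with the optional
// filters that GetInitialEventPerDayParams suggests (event_id plus a created_at range).
const getInitialEventPerDaySQL = `
SELECT DISTINCT ON (event_id, date_trunc('day', created_at))
       id, event_id, status, created_at
FROM   event_history
WHERE  ($1::text IS NULL OR event_id = $1)
  AND  ($2::timestamp IS NULL OR created_at >= $2)
  AND  ($3::timestamp IS NULL OR created_at <= $3)
ORDER  BY event_id, date_trunc('day', created_at), created_at;
`

func main() {
    fmt.Println(getInitialEventPerDaySQL)
}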
internal/repository/odd_history.go (new file, 60 lines)
@@ -0,0 +1,60 @@
package repository

import (
    "context"
    "fmt"

    dbgen "github.com/SamuelTariku/FortuneBet-Backend/gen/db"
    "github.com/SamuelTariku/FortuneBet-Backend/internal/domain"
)

func (s *Store) InsertOddHistory(ctx context.Context, odd domain.CreateOddHistory) (domain.OddHistory, error) {
    dbOddHistory, err := s.queries.InsertOddHistory(ctx, domain.ConvertCreateOddHistory(odd))

    if err != nil {
        return domain.OddHistory{}, fmt.Errorf("InsertOddHistory failed: %w", err)
    }

    return domain.ConvertDBOddHistory(dbOddHistory), nil
}

func convertOddHistories(list []dbgen.OddHistory) []domain.OddHistory {
    result := make([]domain.OddHistory, 0, len(list))
    for _, item := range list {
        result = append(result, domain.ConvertDBOddHistory(item))
    }
    return result
}

func (s *Store) GetAllOddHistory(ctx context.Context, filter domain.OddHistoryFilter) ([]domain.OddHistory, error) {
    dbOddHistories, err := s.queries.GetAllOddHistory(ctx, dbgen.GetAllOddHistoryParams{
        OddID:         filter.OddID.ToPG(),
        MarketID:      filter.MarketID.ToPG(),
        RawOddID:      filter.RawOddID.ToPG(),
        EventID:       filter.EventID.ToPG(),
        CreatedAfter:  filter.CreatedAfter.ToPG(),
        CreatedBefore: filter.CreatedBefore.ToPG(),
    })

    if err != nil {
        return nil, fmt.Errorf("GetAllOddHistory failed: %w", err)
    }

    return convertOddHistories(dbOddHistories), nil
}

func (s *Store) GetInitialOddPerDay(ctx context.Context, filter domain.OddHistoryFilter) ([]domain.OddHistory, error) {
    dbOddHistories, err := s.queries.GetInitialOddPerDay(ctx, dbgen.GetInitialOddPerDayParams{
        OddID:         filter.OddID.ToPG(),
        MarketID:      filter.MarketID.ToPG(),
        RawOddID:      filter.RawOddID.ToPG(),
        EventID:       filter.EventID.ToPG(),
        CreatedAfter:  filter.CreatedAfter.ToPG(),
        CreatedBefore: filter.CreatedBefore.ToPG(),
    })

    if err != nil {
        return nil, fmt.Errorf("GetInitialOddPerDay failed: %w", err)
    }

    return convertOddHistories(dbOddHistories), nil
}
@@ -3,6 +3,7 @@ package repository
 import (
     "context"
     "encoding/json"
+    "fmt"
 
     "os"
     "strconv"
@@ -53,6 +54,7 @@ func (s *Store) SaveNonLiveMarket(ctx context.Context, m domain.Market) error {
         }
 
         err := s.queries.InsertNonLiveOdd(ctx, params)
+        fmt.Printf("Inserting Non Live Odd")
         if err != nil {
             _ = writeFailedMarketLog(m, err)
             continue
@@ -110,8 +112,8 @@ func (s *Store) GetPrematchOdds(ctx context.Context, eventID string) ([]domain.O
             OddsValue: odd.OddsValue.Float64,
             Section:   odd.Section,
             Category:  odd.Category.String,
-            RawOdds: func() []domain.RawMessage {
-                var rawOdds []domain.RawMessage
+            RawOdds: func() []json.RawMessage {
+                var rawOdds []json.RawMessage
                 if err := json.Unmarshal(odd.RawOdds, &rawOdds); err != nil {
                     rawOdds = nil
                 }
@@ -147,8 +149,8 @@ func (s *Store) GetALLPrematchOdds(ctx context.Context) ([]domain.Odd, error) {
             OddsValue: row.OddsValue.Float64,
             Section:   row.Section,
             Category:  row.Category.String,
-            RawOdds: func() []domain.RawMessage {
-                var rawOdds []domain.RawMessage
+            RawOdds: func() []json.RawMessage {
+                var rawOdds []json.RawMessage
                 if err := json.Unmarshal(row.RawOdds, &rawOdds); err != nil {
                     rawOdds = nil
                 }
@@ -163,13 +165,13 @@ func (s *Store) GetALLPrematchOdds(ctx context.Context) ([]domain.Odd, error) {
     return domainOdds, nil
 }
 
-func (s *Store) GetRawOddsByMarketID(ctx context.Context, rawOddsID string, upcomingID string) (domain.RawOddsByMarketID, error) {
-    params := dbgen.GetRawOddsByMarketIDParams{
-        MarketID: pgtype.Text{String: rawOddsID, Valid: true},
+func (s *Store) GetRawOddsByMarketID(ctx context.Context, marketID string, upcomingID string) (domain.RawOddsByMarketID, error) {
+    params := dbgen.GetOddsByMarketIDParams{
+        MarketID: pgtype.Text{String: marketID, Valid: true},
         Fi:       pgtype.Text{String: upcomingID, Valid: true},
     }
 
-    odds, err := s.queries.GetRawOddsByMarketID(ctx, params)
+    odds, err := s.queries.GetOddsByMarketID(ctx, params)
     if err != nil {
         return domain.RawOddsByMarketID{}, err
     }
@@ -183,27 +185,22 @@ func (s *Store) GetRawOddsByMarketID(ctx context.Context, rawOddsID string, upco
         ID:         int64(odds.ID),
         MarketName: odds.MarketName.String,
         Handicap:   odds.Handicap.String,
-        RawOdds: func() []domain.RawMessage {
-            converted := make([]domain.RawMessage, len(rawOdds))
+        RawOdds: func() []json.RawMessage {
+            converted := make([]json.RawMessage, len(rawOdds))
             for i, r := range rawOdds {
-                converted[i] = domain.RawMessage(r)
+                converted[i] = json.RawMessage(r)
             }
             return converted
         }(),
         FetchedAt: odds.FetchedAt.Time,
     }, nil
 }
 
-func (s *Store) GetPaginatedPrematchOddsByUpcomingID(ctx context.Context, upcomingID string, limit domain.ValidInt64, offset domain.ValidInt64) ([]domain.Odd, error) {
+func (s *Store) GetPaginatedPrematchOddsByUpcomingID(ctx context.Context, upcomingID string, limit domain.ValidInt32, offset domain.ValidInt32) ([]domain.Odd, error) {
     odds, err := s.queries.GetPaginatedPrematchOddsByUpcomingID(ctx, dbgen.GetPaginatedPrematchOddsByUpcomingIDParams{
         ID:     upcomingID,
-        Limit: pgtype.Int4{
-            Int32: int32(limit.Value),
-            Valid: limit.Valid,
-        },
-        Offset: pgtype.Int4{
-            Int32: int32(offset.Value),
-            Valid: offset.Valid,
-        },
+        Limit:  limit.ToPG(),
+        Offset: offset.ToPG(),
     })
     if err != nil {
         return nil, err
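Note: the old code built the pgtype.Int4 values inline; the new code delegates to domain.ValidInt32.ToPG(), whose definition is not shown in this diff. Assuming ValidInt32 keeps the Value and Valid fields the replaced lines read, and that the project is on pgx v5 (the pgtype.Text{String: ..., Valid: true} literals in this same file point that way), a minimal sketch of the helper would be:

package domain

import "github.com/jackc/pgx/v5/pgtype"

// Sketch only: the real domain.ValidInt32 is not part of this diff.
// ValidInt32 is an optional int32, mirroring the pgtype.Int4 literal the old code built inline.
type ValidInt32 struct {
    Value int32
    Valid bool
}

// ToPG converts the optional value into the nullable pgtype.Int4 that the
// sqlc-generated params (e.g. GetPaginatedPrematchOddsByUpcomingIDParams) expect.
func (v ValidInt32) ToPG() pgtype.Int4 {
    return pgtype.Int4{Int32: v.Value, Valid: v.Valid}
}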
@@ -211,7 +208,7 @@ func (s *Store) GetPaginatedPrematchOddsByUpcomingID(ctx context.Context, upcomi
     // Map the results to domain.Odd
     domainOdds := make([]domain.Odd, len(odds))
     for i, odd := range odds {
-        var rawOdds []domain.RawMessage
+        var rawOdds []json.RawMessage
         if err := json.Unmarshal(odd.RawOdds, &rawOdds); err != nil {
             rawOdds = nil
         }
@@ -248,7 +245,7 @@ func (s *Store) GetPrematchOddsByUpcomingID(ctx context.Context, upcomingID stri
     // Map the results to domain.Odd
     domainOdds := make([]domain.Odd, len(odds))
     for i, odd := range odds {
-        var rawOdds []domain.RawMessage
+        var rawOdds []json.RawMessage
         if err := json.Unmarshal(odd.RawOdds, &rawOdds); err != nil {
             rawOdds = nil
         }
@@ -16,5 +16,7 @@ type Service interface {
     // GetAndStoreMatchResult(ctx context.Context, eventID string) error
     UpdateFinalScore(ctx context.Context, eventID, fullScore string, status domain.EventStatus) error
     UpdateEventStatus(ctx context.Context, eventID string, status domain.EventStatus) error
-    UpdateFeatured(ctx context.Context, eventID string, flagged bool) error
+    UpdateEventFeatured(ctx context.Context, eventID string, flagged bool) error
+    IsEventMonitored(ctx context.Context, eventID string) (bool, error)
+    UpdateEventMonitored(ctx context.Context, eventID string, IsMonitored bool) error
 }
@@ -209,7 +209,6 @@ func (s *service) fetchUpcomingEventsFromProvider(ctx context.Context, source_ur
     sportIDs := []int{1, 18, 17, 3, 83, 15, 12, 19, 8, 16, 91}
     // sportIDs := []int{1}
     // TODO: Add the league skipping again when we have dynamic leagues
-
     // b, err := os.OpenFile("logs/skipped_leagues.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
     // if err != nil {
     //     log.Printf("❌ Failed to open leagues file %v", err)
@@ -242,7 +241,20 @@ func (s *service) fetchUpcomingEventsFromProvider(ctx context.Context, source_ur
         }
         defer resp.Body.Close()
 
-        body, _ := io.ReadAll(resp.Body)
+        body, err := io.ReadAll(resp.Body)
+
+        if err != nil {
+            s.mongoLogger.Error(
+                "Failed to read event response body",
+                zap.String("source", source),
+                zap.Int("sport_id", sportID),
+                zap.Int("page", page),
+                zap.Int("total_pages", totalPages),
+                zap.Error(err),
+            )
+            continue
+        }
         var data domain.BetResult
 
         if err := json.Unmarshal(body, &data); err != nil || data.Success != 1 {
@@ -345,6 +357,19 @@ func (s *service) fetchUpcomingEventsFromProvider(ctx context.Context, source_ur
             event.MatchName = ev.Home.Name + " vs " + ev.Away.Name
         }
 
+        if err := s.CheckAndInsertEventHistory(ctx, event); err != nil {
+            s.mongoLogger.Error(
+                "failed to check and insert event history",
+                zap.String("leagueID", ev.League.ID),
+                zap.String("leagueName", ev.League.Name),
+                zap.String("source", source),
+                zap.Int("sport_id", sportID),
+                zap.Int("page", page),
+                zap.Int("total_pages", totalPages),
+                zap.Error(err),
+            )
+        }
+
         err = s.store.SaveUpcomingEvent(ctx, event)
         if err != nil {
             s.mongoLogger.Error(
@@ -388,6 +413,60 @@ func (s *service) fetchUpcomingEventsFromProvider(ctx context.Context, source_ur
     }
 }
 
+func (s *service) CheckAndInsertEventHistory(ctx context.Context, event domain.UpcomingEvent) error {
+    isEventMonitored, err := s.store.IsEventMonitored(ctx, event.ID)
+
+    if err != nil {
+        s.mongoLogger.Error(
+            "failed to get event is_monitored",
+            zap.String("eventID", event.ID),
+            zap.Int32("leagueID", event.LeagueID),
+            zap.String("leagueName", event.LeagueName),
+            zap.Int32("sport_id", event.SportID),
+            zap.Error(err),
+        )
+    }
+
+    if !isEventMonitored {
+        return nil
+    }
+
+    oldEvent, err := s.GetUpcomingEventByID(ctx, event.ID)
+
+    if err != nil {
+        s.mongoLogger.Error(
+            "failed to get event by id",
+            zap.String("eventID", event.ID),
+            zap.Int32("leagueID", event.LeagueID),
+            zap.String("leagueName", event.LeagueName),
+            zap.Int32("sport_id", event.SportID),
+            zap.Error(err),
+        )
+    }
+
+    if oldEvent.Status != event.Status {
+        _, err := s.store.InsertEventHistory(ctx, domain.CreateEventHistory{
+            EventID: event.ID,
+            Status:  string(event.Status),
+        })
+
+        if err != nil {
+            s.mongoLogger.Error(
+                "failed to insert event history",
+                zap.String("eventID", event.ID),
+                zap.Int32("leagueID", event.LeagueID),
+                zap.String("leagueName", event.LeagueName),
+                zap.Int32("sport_id", event.SportID),
+                zap.Error(err),
+            )
+
+            return err
+        }
+    }
+
+    return nil
+}
+
 func getString(v interface{}) string {
     if str, ok := v.(string); ok {
         return str
@@ -438,8 +517,15 @@ func (s *service) UpdateEventStatus(ctx context.Context, eventID string, status
     return s.store.UpdateEventStatus(ctx, eventID, status)
 }
 
-func (s *service) UpdateFeatured(ctx context.Context, eventID string, flagged bool) error {
-    return s.store.UpdateFeatured(ctx, eventID, flagged)
+func (s *service) UpdateEventFeatured(ctx context.Context, eventID string, flagged bool) error {
+    return s.store.UpdateEventFeatured(ctx, eventID, flagged)
+}
+
+func (s *service) IsEventMonitored(ctx context.Context, eventID string) (bool, error) {
+    return s.store.IsEventMonitored(ctx, eventID)
+}
+
+func (s *service) UpdateEventMonitored(ctx context.Context, eventID string, IsMonitored bool) error {
+    return s.store.UpdateEventMonitored(ctx, eventID, IsMonitored)
 }
 
 // func (s *service) GetAndStoreMatchResult(ctx context.Context, eventID string) error {
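For readers skimming the new history plumbing: the rule CheckAndInsertEventHistory enforces is that a history row is written only for monitored events whose status actually changed. A tiny standalone illustration of that guard (the status values below are placeholders, not the project's real domain.EventStatus constants):

package main

import "fmt"

// shouldRecord mirrors the guard in CheckAndInsertEventHistory above:
// only monitored events whose status changed produce an event_history row.
func shouldRecord(monitored bool, oldStatus, newStatus string) bool {
    return monitored && oldStatus != newStatus
}

func main() {
    fmt.Println(shouldRecord(true, "upcoming", "inplay"))  // true: status change on a monitored event
    fmt.Println(shouldRecord(true, "inplay", "inplay"))    // false: nothing changed
    fmt.Println(shouldRecord(false, "upcoming", "inplay")) // false: event is not monitored
}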
@@ -17,4 +17,7 @@ type Service interface {
     GetALLPrematchOdds(ctx context.Context) ([]domain.Odd, error)
     GetRawOddsByMarketID(ctx context.Context, marketID string, upcomingID string) (domain.RawOddsByMarketID, error)
     DeleteOddsForEvent(ctx context.Context, eventID string) error
+    InsertOddHistory(ctx context.Context, odd domain.CreateOddHistory) (domain.OddHistory, error)
+    GetAllOddHistory(ctx context.Context, filter domain.OddHistoryFilter) ([]domain.OddHistory, error)
+    GetInitialOddPerDay(ctx context.Context, filter domain.OddHistoryFilter) ([]domain.OddHistory, error)
 }
@@ -16,21 +16,24 @@ import (
     "github.com/SamuelTariku/FortuneBet-Backend/internal/config"
     "github.com/SamuelTariku/FortuneBet-Backend/internal/domain"
     "github.com/SamuelTariku/FortuneBet-Backend/internal/repository"
+    "github.com/SamuelTariku/FortuneBet-Backend/internal/services/event"
     "go.uber.org/zap"
 )
 
 type ServiceImpl struct {
     store       *repository.Store
     config      *config.Config
+    eventSvc    event.Service
     logger      *slog.Logger
     mongoLogger *zap.Logger
     client      *http.Client
 }
 
-func New(store *repository.Store, cfg *config.Config, logger *slog.Logger, mongoLogger *zap.Logger) *ServiceImpl {
+func New(store *repository.Store, cfg *config.Config, eventSvc event.Service, logger *slog.Logger, mongoLogger *zap.Logger) *ServiceImpl {
     return &ServiceImpl{
         store:       store,
         config:      cfg,
+        eventSvc:    eventSvc,
         logger:      logger,
         mongoLogger: mongoLogger,
         client:      &http.Client{Timeout: 10 * time.Second},
@@ -76,7 +79,7 @@ func (s *ServiceImpl) FetchNonLiveOdds(ctx context.Context) error {
 }
 
 func (s *ServiceImpl) fetchBet365Odds(ctx context.Context) error {
-    eventIDs, err := s.store.GetAllUpcomingEvents(ctx)
+    eventIDs, err := s.eventSvc.GetAllUpcomingEvents(ctx)
     if err != nil {
         s.mongoLogger.Error(
             "Failed to fetch upcoming event IDs",
@@ -589,6 +592,17 @@ func (s *ServiceImpl) storeSection(ctx context.Context, eventID, fi, sectionName
             Source:     "bet365",
         }
 
+        if err := s.CheckAndInsertOddHistory(ctx, marketRecord); err != nil {
+            s.mongoLogger.Error(
+                "failed to check and insert odd history",
+                zap.String("market_id", marketIDstr),
+                zap.String("market_name", market.Name),
+                zap.String("eventID", eventID),
+                zap.Error(err),
+            )
+            continue
+        }
+
         err = s.store.SaveNonLiveMarket(ctx, marketRecord)
         if err != nil {
             s.mongoLogger.Error(
@@ -598,7 +612,7 @@ func (s *ServiceImpl) storeSection(ctx context.Context, eventID, fi, sectionName
                 zap.String("eventID", eventID),
                 zap.Error(err),
             )
-            errs = append(errs, fmt.Errorf("market %s: %w", market.ID, err))
+            errs = append(errs, fmt.Errorf("market %v: %w", market.ID, err))
             continue
         }
     }
@@ -609,6 +623,99 @@ func (s *ServiceImpl) storeSection(ctx context.Context, eventID, fi, sectionName
     return nil
 }
 
+func (s *ServiceImpl) CheckAndInsertOddHistory(ctx context.Context, market domain.Market) error {
+    isEventMonitored, err := s.eventSvc.IsEventMonitored(ctx, market.EventID)
+
+    if err != nil {
+        s.mongoLogger.Error(
+            "failed to get is_monitored",
+            zap.String("market_id", market.MarketID),
+            zap.String("market_name", market.Name),
+            zap.String("eventID", market.EventID),
+            zap.Error(err),
+        )
+    }
+
+    if !isEventMonitored {
+        return nil
+    }
+
+    oldOdds, err := s.store.GetRawOddsByMarketID(ctx, market.MarketID, market.EventID)
+
+    if err != nil {
+        s.mongoLogger.Error(
+            "failed to get raw odds by market id",
+            zap.String("market_id", market.MarketID),
+            zap.String("market_name", market.Name),
+            zap.String("eventID", market.EventID),
+            zap.Error(err),
+        )
+        return err
+    }
+
+    if len(oldOdds.RawOdds) != len(market.Odds) {
+        s.mongoLogger.Error(
+            "new odds data does not match old odds data",
+            zap.String("market_id", market.MarketID),
+            zap.String("market_name", market.Name),
+            zap.String("eventID", market.EventID),
+            zap.Error(err),
+        )
+        return fmt.Errorf("new odds data does not match old odds data")
+    }
+
+    oldRawOdds, err := convertRawMessage(oldOdds.RawOdds)
+
+    if err != nil {
+        s.mongoLogger.Error(
+            "failed to convert raw odds to map",
+            zap.String("market_id", market.MarketID),
+            zap.String("market_name", market.Name),
+            zap.String("eventID", market.EventID),
+            zap.Error(err),
+        )
+        return err
+    }
+
+    for _, oddData := range market.Odds {
+        newRawOddID := getInt(oddData["id"])
+        newOddsVal := getFloat(oddData["odds"])
+        isFound := false
+        for _, oldOddData := range oldRawOdds {
+            oldRawOddID := getInt(oldOddData["id"])
+            oldOddsVal := getFloat(oldOddData["odds"])
+            if newRawOddID == oldRawOddID {
+                if newOddsVal != oldOddsVal {
+                    _, err := s.store.InsertOddHistory(ctx, domain.CreateOddHistory{
+                        OddID:    oldOdds.ID,
+                        MarketID: market.MarketID,
+                        RawOddID: int64(newRawOddID),
+                        EventID:  market.EventID,
+                        OddValue: newOddsVal,
+                    })
+
+                    if err != nil {
+                        s.mongoLogger.Error(
+                            "failed to insert odd history",
+                            zap.String("market_id", market.MarketID),
+                            zap.String("market_name", market.Name),
+                            zap.String("eventID", market.EventID),
+                            zap.Int64("odd_id", oldOdds.ID),
+                            zap.Int("raw_odd_id", newRawOddID),
+                            zap.Error(err),
+                        )
+                    }
+                }
+                isFound = true
+            }
+        }
+        if !isFound {
+            fmt.Printf("raw odd id %d not found", newRawOddID)
+        }
+    }
+    return nil
+}
+
 func (s *ServiceImpl) GetPrematchOdds(ctx context.Context, eventID string) ([]domain.Odd, error) {
     return s.store.GetPrematchOdds(ctx, eventID)
 }
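CheckAndInsertOddHistory above compares the stored raw odds against the freshly fetched market odds entry by entry and records a row whenever a price moved. A standalone illustration of that comparison follows; the values are invented, and in the service the maps come from the provider payload and from convertRawMessage and are read through getInt/getFloat:

package main

import "fmt"

func main() {
    // Previously stored raw odds for a market (invented sample data).
    oldRaw := []map[string]interface{}{
        {"id": 101.0, "odds": 1.85},
        {"id": 102.0, "odds": 2.10},
    }
    // Freshly fetched odds for the same market.
    newRaw := []map[string]interface{}{
        {"id": 101.0, "odds": 1.95}, // moved: would produce an odd_history row
        {"id": 102.0, "odds": 2.10}, // unchanged: skipped
    }

    for _, n := range newRaw {
        for _, o := range oldRaw {
            // A matching raw odd ID with a changed price is what triggers InsertOddHistory.
            if n["id"] == o["id"] && n["odds"] != o["odds"] {
                fmt.Printf("raw odd %v moved %v -> %v\n", n["id"], o["odds"], n["odds"])
            }
        }
    }
}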
@@ -630,7 +737,7 @@ func (s *ServiceImpl) GetPrematchOddsByUpcomingID(ctx context.Context, upcomingI
     return s.store.GetPrematchOddsByUpcomingID(ctx, upcomingID)
 }
 
-func (s *ServiceImpl) GetPaginatedPrematchOddsByUpcomingID(ctx context.Context, upcomingID string, limit, offset domain.ValidInt64) ([]domain.Odd, error) {
+func (s *ServiceImpl) GetPaginatedPrematchOddsByUpcomingID(ctx context.Context, upcomingID string, limit, offset domain.ValidInt32) ([]domain.Odd, error) {
     return s.store.GetPaginatedPrematchOddsByUpcomingID(ctx, upcomingID, limit, offset)
 }
 
@@ -651,6 +758,12 @@ func getInt(v interface{}) int {
     }
     return -1
 }
+
+func getFloat(v interface{}) float64 {
+    if n, ok := v.(float64); ok {
+        return n
+    }
+    return 0
+}
 
 func getMap(v interface{}) map[string]interface{} {
     if m, ok := v.(map[string]interface{}); ok {
|
@ -350,7 +350,7 @@ func (h *Handler) UpdateEventFeatured(c *fiber.Ctx) error {
|
||||||
)
|
)
|
||||||
return fiber.NewError(fiber.StatusBadRequest, errMsg)
|
return fiber.NewError(fiber.StatusBadRequest, errMsg)
|
||||||
}
|
}
|
||||||
err := h.eventSvc.UpdateFeatured(c.Context(), eventID, req.Featured)
|
err := h.eventSvc.UpdateEventFeatured(c.Context(), eventID, req.Featured)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
h.mongoLoggerSvc.Error("Failed to update event featured",
|
h.mongoLoggerSvc.Error("Failed to update event featured",
|
||||||
|
|
|
||||||