limiting the crons because of the request limit
commit 9e4b0d9942 (parent b5166fa53d)
@@ -19,8 +19,8 @@ CREATE TABLE IF NOT EXISTS referral_settings (
 CREATE TABLE IF NOT EXISTS referrals (
     id BIGSERIAL PRIMARY KEY,
     referral_code VARCHAR(10) NOT NULL UNIQUE,
-    -- referrer_id VARCHAR(255) NOT NULL,
-    -- referred_id VARCHAR(255) UNIQUE,
+    referrer_id VARCHAR(255) NOT NULL,
+    referred_id VARCHAR(255) UNIQUE,
     status ReferralStatus NOT NULL DEFAULT 'PENDING',
     reward_amount DECIMAL(15, 2) NOT NULL DEFAULT 0.00,
     cashback_amount DECIMAL(15, 2) NOT NULL DEFAULT 0.00,

@@ -33,7 +33,7 @@ CREATE TABLE IF NOT EXISTS referrals (
     CONSTRAINT cashback_amount_positive CHECK (cashback_amount >= 0)
 );
 CREATE INDEX idx_referrals_referral_code ON referrals (referral_code);
--- CREATE INDEX idx_referrals_referrer_id ON referrals (referrer_id);
+CREATE INDEX idx_referrals_referrer_id ON referrals (referrer_id);
 CREATE INDEX idx_referrals_status ON referrals (status);
 ALTER TABLE users
     ADD COLUMN IF NOT EXISTS referral_code VARCHAR(10) UNIQUE,

@@ -140,6 +140,28 @@ FROM events
 WHERE is_live = false
   AND status = 'upcoming'
 ORDER BY start_time ASC;
+-- name: GetExpiredUpcomingEvents :many
+SELECT id,
+       sport_id,
+       match_name,
+       home_team,
+       away_team,
+       home_team_id,
+       away_team_id,
+       home_kit_image,
+       away_kit_image,
+       league_id,
+       league_name,
+       league_cc,
+       start_time,
+       is_live,
+       status,
+       fetched_at
+FROM events
+WHERE is_live = false
+  AND status = 'upcoming'
+  AND start_time < now()
+ORDER BY start_time ASC;
 -- name: GetTotalEvents :one
 SELECT COUNT(*)
 FROM events

@@ -205,7 +227,6 @@ WHERE id = $1
   AND is_live = false
   AND status = 'upcoming'
 LIMIT 1;
-
 -- name: UpdateMatchResult :exec
 UPDATE events
 SET score = $1,

@@ -90,6 +90,86 @@ func (q *Queries) GetAllUpcomingEvents(ctx context.Context) ([]GetAllUpcomingEve
     return items, nil
 }
 
+const GetExpiredUpcomingEvents = `-- name: GetExpiredUpcomingEvents :many
+SELECT id,
+       sport_id,
+       match_name,
+       home_team,
+       away_team,
+       home_team_id,
+       away_team_id,
+       home_kit_image,
+       away_kit_image,
+       league_id,
+       league_name,
+       league_cc,
+       start_time,
+       is_live,
+       status,
+       fetched_at
+FROM events
+WHERE is_live = false
+  AND status = 'upcoming'
+  AND start_time < now()
+ORDER BY start_time ASC
+`
+
+type GetExpiredUpcomingEventsRow struct {
+    ID           string           `json:"id"`
+    SportID      pgtype.Text      `json:"sport_id"`
+    MatchName    pgtype.Text      `json:"match_name"`
+    HomeTeam     pgtype.Text      `json:"home_team"`
+    AwayTeam     pgtype.Text      `json:"away_team"`
+    HomeTeamID   pgtype.Text      `json:"home_team_id"`
+    AwayTeamID   pgtype.Text      `json:"away_team_id"`
+    HomeKitImage pgtype.Text      `json:"home_kit_image"`
+    AwayKitImage pgtype.Text      `json:"away_kit_image"`
+    LeagueID     pgtype.Text      `json:"league_id"`
+    LeagueName   pgtype.Text      `json:"league_name"`
+    LeagueCc     pgtype.Text      `json:"league_cc"`
+    StartTime    pgtype.Timestamp `json:"start_time"`
+    IsLive       pgtype.Bool      `json:"is_live"`
+    Status       pgtype.Text      `json:"status"`
+    FetchedAt    pgtype.Timestamp `json:"fetched_at"`
+}
+
+func (q *Queries) GetExpiredUpcomingEvents(ctx context.Context) ([]GetExpiredUpcomingEventsRow, error) {
+    rows, err := q.db.Query(ctx, GetExpiredUpcomingEvents)
+    if err != nil {
+        return nil, err
+    }
+    defer rows.Close()
+    var items []GetExpiredUpcomingEventsRow
+    for rows.Next() {
+        var i GetExpiredUpcomingEventsRow
+        if err := rows.Scan(
+            &i.ID,
+            &i.SportID,
+            &i.MatchName,
+            &i.HomeTeam,
+            &i.AwayTeam,
+            &i.HomeTeamID,
+            &i.AwayTeamID,
+            &i.HomeKitImage,
+            &i.AwayKitImage,
+            &i.LeagueID,
+            &i.LeagueName,
+            &i.LeagueCc,
+            &i.StartTime,
+            &i.IsLive,
+            &i.Status,
+            &i.FetchedAt,
+        ); err != nil {
+            return nil, err
+        }
+        items = append(items, i)
+    }
+    if err := rows.Err(); err != nil {
+        return nil, err
+    }
+    return items, nil
+}
+
 const GetPaginatedUpcomingEvents = `-- name: GetPaginatedUpcomingEvents :many
 SELECT id,
        sport_id,

@@ -89,6 +89,34 @@ func (s *Store) GetAllUpcomingEvents(ctx context.Context) ([]domain.UpcomingEven
     }
     return upcomingEvents, nil
 }
+
+func (s *Store) GetExpiredUpcomingEvents(ctx context.Context) ([]domain.UpcomingEvent, error) {
+    events, err := s.queries.GetExpiredUpcomingEvents(ctx)
+    if err != nil {
+        return nil, err
+    }
+
+    upcomingEvents := make([]domain.UpcomingEvent, len(events))
+    for i, e := range events {
+        upcomingEvents[i] = domain.UpcomingEvent{
+            ID:           e.ID,
+            SportID:      e.SportID.String,
+            MatchName:    e.MatchName.String,
+            HomeTeam:     e.HomeTeam.String,
+            AwayTeam:     e.AwayTeam.String,
+            HomeTeamID:   e.HomeTeamID.String,
+            AwayTeamID:   e.AwayTeamID.String,
+            HomeKitImage: e.HomeKitImage.String,
+            AwayKitImage: e.AwayKitImage.String,
+            LeagueID:     e.LeagueID.String,
+            LeagueName:   e.LeagueName.String,
+            LeagueCC:     e.LeagueCc.String,
+            StartTime:    e.StartTime.Time.UTC(),
+        }
+    }
+    return upcomingEvents, nil
+}
+
 func (s *Store) GetPaginatedUpcomingEvents(ctx context.Context, limit int32, offset int32, leagueID domain.ValidString, sportID domain.ValidString) ([]domain.UpcomingEvent, int64, error) {
     events, err := s.queries.GetPaginatedUpcomingEvents(ctx, dbgen.GetPaginatedUpcomingEventsParams{
         LeagueID: pgtype.Text{

@@ -165,16 +193,16 @@ func (s *Store) GetUpcomingEventByID(ctx context.Context, ID string) (domain.Upc
     }, nil
 }
 func (s *Store) UpdateFinalScore(ctx context.Context, eventID, fullScore, status string) error {
     params := dbgen.UpdateMatchResultParams{
         Score:  pgtype.Text{String: fullScore, Valid: true},
         Status: pgtype.Text{String: status, Valid: true},
         ID:     eventID,
     }
 
     err := s.queries.UpdateMatchResult(ctx, params)
     if err != nil {
         return fmt.Errorf("failed to update final score for event %s: %w", eventID, err)
     }
 
     return nil
 }

@@ -10,6 +10,7 @@ type Service interface {
     FetchLiveEvents(ctx context.Context) error
     FetchUpcomingEvents(ctx context.Context) error
     GetAllUpcomingEvents(ctx context.Context) ([]domain.UpcomingEvent, error)
+    GetExpiredUpcomingEvents(ctx context.Context) ([]domain.UpcomingEvent, error)
     GetPaginatedUpcomingEvents(ctx context.Context, limit int32, offset int32, leagueID domain.ValidString, sportID domain.ValidString) ([]domain.UpcomingEvent, int64, error)
     GetUpcomingEventByID(ctx context.Context, ID string) (domain.UpcomingEvent, error)
     // GetAndStoreMatchResult(ctx context.Context, eventID string) error

@@ -101,7 +101,7 @@ func (s *service) FetchUpcomingEvents(ctx context.Context) error {
     sportIDs := []int{1}
     var totalPages int = 1
     var page int = 0
-    var limit int = 5
+    var limit int = 100
     var count int = 0
     for _, sportID := range sportIDs {
         for page != totalPages {

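Note: the page-size bump from 5 to 100 is the main lever against the request cap here — each page of upcoming events costs one API request, so requests per run scale as ceil(total / limit). A rough sketch of that arithmetic in Go (the event count is a made-up illustration, not a measured number):

package main

import "fmt"

// pagesNeeded returns how many paged API requests it takes to pull n events
// when each request returns at most `limit` events (ceiling division).
func pagesNeeded(n, limit int) int {
    return (n + limit - 1) / limit
}

func main() {
    const n = 1000 // hypothetical number of upcoming events for sport_id 1
    fmt.Println(pagesNeeded(n, 5))   // old limit: 200 requests per cron run
    fmt.Println(pagesNeeded(n, 100)) // new limit: 10 requests per cron run
}
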
@@ -199,6 +199,10 @@ func (s *service) GetAllUpcomingEvents(ctx context.Context) ([]domain.UpcomingEv
     return s.store.GetAllUpcomingEvents(ctx)
 }
 
+func (s *service) GetExpiredUpcomingEvents(ctx context.Context) ([]domain.UpcomingEvent, error) {
+    return s.store.GetExpiredUpcomingEvents(ctx)
+}
+
 func (s *service) GetPaginatedUpcomingEvents(ctx context.Context, limit int32, offset int32, leagueID domain.ValidString, sportID domain.ValidString) ([]domain.UpcomingEvent, int64, error) {
     return s.store.GetPaginatedUpcomingEvents(ctx, limit, offset, leagueID, sportID)
 }

@@ -1,6 +1,8 @@
 package httpserver
 
 import (
+    // "context"
+
     "context"
     "log"
 

@@ -34,34 +36,38 @@ func StartDataFetchingCrons(eventService eventsvc.Service, oddsService oddssvc.S
         // }
         // },
         // },
-        {
-            spec: "0 */15 * * * *", // Every 15 minutes
-            task: func() {
-                if err := oddsService.FetchNonLiveOdds(context.Background()); err != nil {
-                    log.Printf("FetchNonLiveOdds error: %v", err)
-                }
-            },
-        },
-        {
-            spec: "0 */15 * * * *",
-            task: func() {
-                log.Println("Fetching results for upcoming events...")
-
-                upcomingEvents, err := eventService.GetAllUpcomingEvents(context.Background())
-                if err != nil {
-                    log.Printf("Failed to fetch upcoming events: %v", err)
-                    return
-                }
-
-                for _, event := range upcomingEvents {
-                    if err := resultService.FetchAndStoreResult(context.Background(), event.ID); err != nil {
-                        log.Printf(" Failed to fetch/store result for event %s: %v", event.ID, err)
-                    } else {
-                        log.Printf(" Successfully stored result for event %s", event.ID)
-                    }
-                }
-            },
-        },
+        // {
+        //     // spec: "0 */15 * * * *", // Every 15 minutes
+        //     spec: "0 0 * * * *", // TODO: Every hour because of the 3600 requests per hour limit
+        //     task: func() {
+
+        //         if err := oddsService.FetchNonLiveOdds(context.Background()); err != nil {
+        //             log.Printf("FetchNonLiveOdds error: %v", err)
+        //         }
+        //         time.Sleep(2 * time.Second) // This will restrict the fetching to 1800 requests per hour
+
+        //     },
+        // },
+        // {
+        //     spec: "0 */15 * * * *",
+        //     task: func() {
+        //         log.Println("Fetching results for upcoming events...")
+
+        //         upcomingEvents, err := eventService.GetAllUpcomingEvents(context.Background())
+        //         if err != nil {
+        //             log.Printf("Failed to fetch upcoming events: %v", err)
+        //             return
+        //         }
+
+        //         for _, event := range upcomingEvents {
+        //             if err := resultService.FetchAndStoreResult(context.Background(), event.ID); err != nil {
+        //                 log.Printf(" Failed to fetch/store result for event %s: %v", event.ID, err)
+        //             } else {
+        //                 log.Printf(" Successfully stored result for event %s", event.ID)
+        //             }
+        //         }
+        //     },
+        // },
     }
 
     for _, job := range schedule {

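Note: the commented-out TODOs above point at the intended end state — run the odds fetch hourly instead of every 15 minutes, and space calls roughly 2 s apart so throughput stays near 1800 requests/hour, under the provider's 3600/hour cap. Below is a minimal, self-contained sketch of that sleep-based throttling; the `throttled` helper and the sample IDs are hypothetical, while `GetExpiredUpcomingEvents` and `FetchAndStoreResult` are the methods this commit adds or already uses:

package main

import (
    "context"
    "log"
    "time"
)

// throttled calls fn once per id, waiting `gap` between calls so the overall
// request rate stays bounded (a 2 s gap caps it at roughly 1800 requests/hour,
// matching the comment left in the cron file).
func throttled(ctx context.Context, ids []string, gap time.Duration, fn func(context.Context, string) error) {
    for _, id := range ids {
        if err := fn(ctx, id); err != nil {
            log.Printf("request for %s failed: %v", id, err)
        }
        select {
        case <-ctx.Done():
            return
        case <-time.After(gap):
        }
    }
}

func main() {
    // Hypothetical event IDs; in the real cron these would come from
    // eventService.GetExpiredUpcomingEvents and fn would wrap
    // resultService.FetchAndStoreResult.
    ids := []string{"evt-1", "evt-2", "evt-3"}
    throttled(context.Background(), ids, 2*time.Second, func(ctx context.Context, id string) error {
        log.Printf("fetching result for event %s", id)
        return nil
    })
}
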
@@ -1,7 +1,6 @@
 package handlers
 
 import (
-    "fmt"
     "strconv"
 
     "github.com/SamuelTariku/FortuneBet-Backend/internal/domain"

@@ -103,7 +102,6 @@ func (h *Handler) GetRawOddsByMarketID(c *fiber.Ctx) error {
 // @Failure 500 {object} response.APIResponse
 // @Router /prematch/events [get]
 func (h *Handler) GetAllUpcomingEvents(c *fiber.Ctx) error {
-
     page := c.QueryInt("page", 1)
     pageSize := c.QueryInt("page_size", 10)
     leagueIDQuery := c.Query("league_id")

@@ -120,9 +118,9 @@ func (h *Handler) GetAllUpcomingEvents(c *fiber.Ctx) error {
 
     events, total, err := h.eventSvc.GetPaginatedUpcomingEvents(c.Context(), int32(pageSize), int32(page)-1, leagueID, sportID)
 
-    fmt.Printf("League ID: %v", leagueID)
+    // fmt.Printf("League ID: %v", leagueID)
     if err != nil {
-        h.logger.Error("getting error", err)
+        h.logger.Error("getting error", "error", err)
         return response.WriteJSON(c, fiber.StatusInternalServerError, "Failed to retrieve all upcoming events", nil, nil)
     }
 

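Note: the logging change assumes a structured, slog-style logger whose Error method takes the message followed by alternating key/value pairs; passing err as a bare second argument leaves it without a key. Whether h.logger is actually log/slog is an assumption not confirmed by this diff — a standalone illustration with the standard library:

package main

import (
    "errors"
    "log/slog"
    "os"
)

func main() {
    logger := slog.New(slog.NewTextHandler(os.Stderr, nil))
    err := errors.New("example failure")

    // logger.Error("getting error", err) // bare value: slog renders it under the !BADKEY placeholder
    logger.Error("getting error", "error", err) // key/value pair: error lands under an explicit "error" attribute
}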