Add fund NAV history chart for transparency
Build Images and Deploy / Update-PROD-Stack (push) Successful in 1m22s

This commit is contained in:
2026-03-19 15:59:47 -04:00
parent 5e9421801e
commit 9a87c0c7c5
5 changed files with 112 additions and 6 deletions
+12
View File
@@ -41,6 +41,7 @@ model HedgeFund {
managers FundManager[] managers FundManager[]
investments FundInvestment[] investments FundInvestment[]
navHistory FundNavHistory[]
@@index([slug]) @@index([slug])
} }
@@ -131,6 +132,17 @@ model PriceHistory {
@@index([hashtagId, recordedAt]) @@index([hashtagId, recordedAt])
} }
/// Periodic snapshot of a hedge fund's valuation, written by the
/// fund-NAV snapshot worker and read by the fund page's NAV chart.
model FundNavHistory {
id String @id @default(cuid())
fundId String
fund HedgeFund @relation(fields: [fundId], references: [id], onDelete: Cascade)
nav Float // per-share net asset value at snapshot time
totalValue Float // total fund value (cash + portfolio) the NAV was derived from
recordedAt DateTime @default(now())
// Supports range queries like "last 7 days for fund X", ordered by time.
@@index([fundId, recordedAt])
}
model Position { model Position {
id String @id @default(cuid()) id String @id @default(cuid())
userId String userId String
+20
View File
@@ -8,6 +8,7 @@ import Link from 'next/link'
import { Building2, TrendingUp, TrendingDown } from 'lucide-react' import { Building2, TrendingUp, TrendingDown } from 'lucide-react'
import { calcFundNav } from '@/lib/pricing' import { calcFundNav } from '@/lib/pricing'
import InvestPanel from './InvestPanel' import InvestPanel from './InvestPanel'
import { PriceChart } from '@/components/PriceChart'
export const dynamic = 'force-dynamic' export const dynamic = 'force-dynamic'
@@ -42,6 +43,16 @@ export default async function FundPage({ params }: { params: { slug: string } })
if (!fund) notFound() if (!fund) notFound()
// Fetch NAV history for the chart (last 7 days)
const navHistory = await prisma.fundNavHistory.findMany({
where: {
fundId: fund.id,
recordedAt: { gte: new Date(Date.now() - 7 * 24 * 60 * 60 * 1000) },
},
orderBy: { recordedAt: 'asc' },
select: { nav: true, recordedAt: true },
})
// Fetch current user's balance and investment in this fund // Fetch current user's balance and investment in this fund
const [currentUser, userInvestment] = session const [currentUser, userInvestment] = session
? await Promise.all([ ? await Promise.all([
@@ -97,6 +108,15 @@ export default async function FundPage({ params }: { params: { slug: string } })
</div> </div>
</div> </div>
{/* NAV history chart */}
<div className="bg-surface-card border border-surface-border rounded-xl p-4">
<h2 className="text-sm font-medium text-slate-400 mb-3">NAV / Share Last 7 Days</h2>
<PriceChart
data={navHistory.map((p) => ({ price: p.nav, recordedAt: p.recordedAt.toISOString() }))}
height={220}
/>
</div>
{/* Stats */} {/* Stats */}
<div className="grid grid-cols-2 sm:grid-cols-3 lg:grid-cols-6 gap-4"> <div className="grid grid-cols-2 sm:grid-cols-3 lg:grid-cols-6 gap-4">
{[ {[
+8
View File
@@ -42,3 +42,11 @@ export const schedulerQueue = new Queue('hashex-scheduler', {
removeOnFail: { count: 5 }, removeOnFail: { count: 5 },
}, },
}) })
/**
 * Producer queue for the hourly fund NAV snapshot job.
 * Retention is capped so Redis only keeps the 10 most recent completed
 * and 10 most recent failed jobs.
 */
const fundNavSnapshotJobOptions = {
  removeOnComplete: { count: 10 },
  removeOnFail: { count: 10 },
}

export const fundNavSnapshotQueue = new Queue('hashex-fund-nav-snapshot', {
  connection: redisOpts(),
  defaultJobOptions: fundNavSnapshotJobOptions,
})
+71 -5
View File
@@ -13,7 +13,7 @@
import { Worker, Queue } from 'bullmq' import { Worker, Queue } from 'bullmq'
import { PrismaClient } from '@prisma/client' import { PrismaClient } from '@prisma/client'
import { getPostsData } from '../lib/mastodon' import { getPostsData } from '../lib/mastodon'
import { calcPrice, calcTrade, dailyResearchPoints } from '../lib/pricing' import { calcPrice, calcTrade, dailyResearchPoints, calcFundNav } from '../lib/pricing'
// ── Connection options ──────────────────────────────────────────────────────── // ── Connection options ────────────────────────────────────────────────────────
// Use plain connection options so BullMQ uses its own bundled ioredis, // Use plain connection options so BullMQ uses its own bundled ioredis,
@@ -54,6 +54,7 @@ function activeUntilFromNow(): Date {
const priceUpdateQueue = new Queue('hashex-price-updates', { connection: redisOpts() }) const priceUpdateQueue = new Queue('hashex-price-updates', { connection: redisOpts() })
const maintenanceQueue = new Queue('hashex-maintenance', { connection: redisOpts() }) const maintenanceQueue = new Queue('hashex-maintenance', { connection: redisOpts() })
const schedulerQueue = new Queue('hashex-scheduler', { connection: redisOpts() }) const schedulerQueue = new Queue('hashex-scheduler', { connection: redisOpts() })
// Producer handle used by setupRepeatableJobs() to register the hourly NAV snapshot.
const fundNavSnapshotQueue = new Queue('hashex-fund-nav-snapshot', { connection: redisOpts() })
// ── Helpers ────────────────────────────────────────────────────────────────── // ── Helpers ──────────────────────────────────────────────────────────────────
@@ -284,6 +285,54 @@ const maintenanceWorker = new Worker(
{ connection: redisOpts() }, { connection: redisOpts() },
) )
/**
 * Fund NAV snapshot worker — records the current NAV of every fund once per hour.
 *
 * For each fund: values the manager's open positions (LONG marked to market;
 * SHORT as entry collateral adjusted by the move against entry), adds the
 * manager's cash balance, derives per-share NAV via calcFundNav, and persists
 * one FundNavHistory row per fund.
 *
 * Improvement over the original: rows are collected and inserted with a single
 * `createMany` instead of one awaited `create` per fund in a loop, avoiding
 * N round trips to the database.
 */
const fundNavSnapshotWorker = new Worker(
  'hashex-fund-nav-snapshot',
  async (job) => {
    console.log(`[fund-nav] snapshotting all fund NAVs (job ${job.id})`)

    const funds = await prisma.hedgeFund.findMany({
      select: {
        id: true,
        sharesOutstanding: true,
        user: {
          select: {
            balance: true,
            // Only open positions contribute to portfolio value.
            positions: {
              where: { shares: { gt: 0 } },
              select: {
                shares: true,
                positionType: true,
                avgBuyPrice: true,
                hashtag: { select: { currentPrice: true } },
              },
            },
          },
        },
      },
    })

    // Build every snapshot row first, then insert in one batch.
    const rows = funds.map((fund) => {
      const portfolioValue = fund.user.positions.reduce((sum, p) => {
        // LONG: shares × current price.
        // SHORT: entry collateral minus the adverse move,
        // i.e. avg·shares − (current − avg)·shares.
        const val =
          p.positionType === 'LONG'
            ? p.shares * p.hashtag.currentPrice
            : p.avgBuyPrice * p.shares - (p.hashtag.currentPrice - p.avgBuyPrice) * p.shares
        return sum + val
      }, 0)

      const totalValue = fund.user.balance + portfolioValue
      return {
        fundId: fund.id,
        nav: calcFundNav(totalValue, fund.sharesOutstanding),
        totalValue,
      }
    })

    if (rows.length > 0) {
      await prisma.fundNavHistory.createMany({ data: rows })
    }

    console.log(`[fund-nav] snapshotted ${rows.length} fund(s)`)
  },
  { connection: redisOpts() },
)
/** /**
* Scheduler worker — triggered on a timer to enqueue price-update jobs. * Scheduler worker — triggered on a timer to enqueue price-update jobs.
* Orders hashtags by lastUpdated ASC so the most stale ones go first. * Orders hashtags by lastUpdated ASC so the most stale ones go first.
@@ -333,7 +382,12 @@ const schedulerWorker = new Worker(
// ── Error handlers ──────────────────────────────────────────────────────────── // ── Error handlers ────────────────────────────────────────────────────────────
// Worker-level connection errors (separate from per-job failures) // Worker-level connection errors (separate from per-job failures)
for (const [name, worker] of [['price', priceWorker], ['maintenance', maintenanceWorker], ['scheduler', schedulerWorker]] as const) { for (const [name, worker] of [
['price', priceWorker],
['maintenance', maintenanceWorker],
['fund-nav', fundNavSnapshotWorker],
['scheduler', schedulerWorker],
] as const) {
worker.on('error', (err) => { worker.on('error', (err) => {
console.error(`[${name}-worker] connection error:`, err.message) console.error(`[${name}-worker] connection error:`, err.message)
}) })
@@ -352,16 +406,18 @@ async function setupRepeatableJobs() {
// Always wipe existing repeatable registrations first so that: // Always wipe existing repeatable registrations first so that:
// - stale entries from old PRICE_UPDATE_INTERVAL_MINUTES values don't persist // - stale entries from old PRICE_UPDATE_INTERVAL_MINUTES values don't persist
// - jobs exhausted by BullMQ retry limits get rescheduled cleanly // - jobs exhausted by BullMQ retry limits get rescheduled cleanly
const [existingScheduler, existingMaintenance] = await Promise.all([ const [existingScheduler, existingMaintenance, existingFundNav] = await Promise.all([
schedulerQueue.getRepeatableJobs(), schedulerQueue.getRepeatableJobs(),
maintenanceQueue.getRepeatableJobs(), maintenanceQueue.getRepeatableJobs(),
fundNavSnapshotQueue.getRepeatableJobs(),
]) ])
await Promise.all([ await Promise.all([
...existingScheduler.map((j) => schedulerQueue.removeRepeatableByKey(j.key)), ...existingScheduler.map((j) => schedulerQueue.removeRepeatableByKey(j.key)),
...existingMaintenance.map((j) => maintenanceQueue.removeRepeatableByKey(j.key)), ...existingMaintenance.map((j) => maintenanceQueue.removeRepeatableByKey(j.key)),
...existingFundNav.map((j) => fundNavSnapshotQueue.removeRepeatableByKey(j.key)),
]) ])
if (existingScheduler.length || existingMaintenance.length) { if (existingScheduler.length || existingMaintenance.length || existingFundNav.length) {
console.log(`[worker] cleared ${existingScheduler.length} scheduler + ${existingMaintenance.length} maintenance repeatable(s)`) console.log(`[worker] cleared ${existingScheduler.length} scheduler + ${existingMaintenance.length} maintenance + ${existingFundNav.length} fund-nav repeatable(s)`)
} }
// Price update sweep — every N minutes // Price update sweep — every N minutes
@@ -382,6 +438,15 @@ async function setupRepeatableJobs() {
}, },
) )
// Hourly fund NAV snapshot — every hour on the hour
await fundNavSnapshotQueue.add(
'fund-nav-snapshot',
{},
{
repeat: { pattern: '0 * * * *' },
},
)
// Immediately trigger a sweep on startup so prices are fresh // Immediately trigger a sweep on startup so prices are fresh
await schedulerQueue.add('trigger-sweep', {}, { jobId: `sweep-startup-${Date.now()}` }) await schedulerQueue.add('trigger-sweep', {}, { jobId: `sweep-startup-${Date.now()}` })
@@ -401,6 +466,7 @@ async function shutdown() {
console.log('[worker] shutting down…') console.log('[worker] shutting down…')
await priceWorker.close() await priceWorker.close()
await maintenanceWorker.close() await maintenanceWorker.close()
await fundNavSnapshotWorker.close()
await schedulerWorker.close() await schedulerWorker.close()
await prisma.$disconnect() await prisma.$disconnect()
process.exit(0) process.exit(0)
+1 -1
View File
File diff suppressed because one or more lines are too long