1import { PragmasAI } from "@pragmas/core";
2import { Pipeline, Transform } from "@pragmas/ml";
3import { connect, Schema } from "@pragmas/db";
4
/**
 * One clustering-derived prediction, produced by `analyzeTrends` and
 * persisted to the `predictions` table by the POST handler below.
 */
interface PredictionResult {
  // Cluster score from the AI backend — assumed to be in [0, 1]; TODO confirm.
  confidence: number;
  // Cluster label, used directly as the prediction category.
  category: string;
  // Per-cluster extras (point count, centroid, …) — intentionally open-ended.
  metadata: Record<string, unknown>;
}
10
// Module-level ML preprocessing pipeline, constructed once at module load and
// shared by every request: normalize numeric fields, one-hot encode
// categoricals, then extract features.
const pipeline = new Pipeline({
  name: "revenue-predictor",
  transforms: [
    // Scale numeric columns before encoding / feature extraction.
    Transform.normalize({ fields: ['revenue', 'growth'] }),
    Transform.encode({ strategy: 'one-hot' }),
    Transform.feature({ extract: true }),
  ],
  // NOTE(review): presumably the held-out validation fraction (20%) —
  // confirm against the @pragmas/ml Pipeline documentation.
  validation: { splitRatio: 0.2 },
});
20
21async function analyzeTrends(
22 data: DataStream,
23 config: AnalysisConfig
24): Promise<PredictionResult[]> {
25 const ai = new PragmasAI({
26 model: "custom-v3",
27 temperature: 0.2,
28 maxTokens: 4096,
29 });
30
31 const processed = await pipeline.run(data);
32 const embeddings = await ai.embed(processed, {
33 dimensions: 1536,
34 pooling: "mean",
35 });
36
37 const clusters = await ai.cluster(embeddings, {
38 algorithm: "hdbscan",
39 minClusterSize: 5,
40 });
41
42 return clusters.map((cluster) => ({
43 confidence: cluster.score,
44 category: cluster.label,
45 metadata: {
46 size: cluster.points.length,
47 centroid: cluster.centroid,
48 },
49 }));
50}
51
52export async function POST(req: Request) {
53 const { dataset, options } = await req.json();
54 const db = await connect(process.env.DB_URL);
55
56 const stream = db.stream(dataset, {
57 batchSize: 1000,
58 parallel: true,
59 });
60
61 const results = await analyzeTrends(stream, {
62 depth: options.depth ?? 3,
63 timeRange: options.range,
64 includeOutliers: false,
65 });
66
67 await db.insert('predictions', results);
68
69 return Response.json({
70 success: true,
71 predictions: results.length,
72 avgConfidence: mean(results.map(r => r.confidence)),
73 });
74}
1
2
3import { PragmasAI } from "@pragmas/core";
4import { Pipeline, Transform } from "@pragmas/ml";
5import { connect, Schema } from "@pragmas/db";
6
/**
 * One clustering-derived prediction, produced by `analyzeTrends` and
 * persisted to the `predictions` table by the POST handler below.
 */
interface PredictionResult {
  // Cluster score from the AI backend — assumed to be in [0, 1]; TODO confirm.
  confidence: number;
  // Cluster label, used directly as the prediction category.
  category: string;
  // Per-cluster extras (point count, centroid, …) — intentionally open-ended.
  metadata: Record<string, unknown>;
}
12
// Module-level ML preprocessing pipeline, constructed once at module load and
// shared by every request: normalize numeric fields, one-hot encode
// categoricals, then extract features.
const pipeline = new Pipeline({
  name: "revenue-predictor",
  transforms: [
    // Scale numeric columns before encoding / feature extraction.
    Transform.normalize({ fields: ['revenue', 'growth'] }),
    Transform.encode({ strategy: 'one-hot' }),
    Transform.feature({ extract: true }),
  ],
  // NOTE(review): presumably the held-out validation fraction (20%) —
  // confirm against the @pragmas/ml Pipeline documentation.
  validation: { splitRatio: 0.2 },
});
22
23async function analyzeTrends(
24 data: DataStream,
25 config: AnalysisConfig
26): Promise<PredictionResult[]> {
27 const ai = new PragmasAI({
28 model: "custom-v3",
29 temperature: 0.2,
30 maxTokens: 4096,
31 });
32
33 const processed = await pipeline.run(data);
34 const embeddings = await ai.embed(processed, {
35 dimensions: 1536,
36 pooling: "mean",
37 });
38
39 const clusters = await ai.cluster(embeddings, {
40 algorithm: "hdbscan",
41 minClusterSize: 5,
42 });
43
44 return clusters.map((cluster) => ({
45 confidence: cluster.score,
46 category: cluster.label,
47 metadata: {
48 size: cluster.points.length,
49 centroid: cluster.centroid,
50 },
51 }));
52}
53
54export async function POST(req: Request) {
55 const { dataset, options } = await req.json();
56 const db = await connect(process.env.DB_URL);
57
58 const stream = db.stream(dataset, {
59 batchSize: 1000,
60 parallel: true,
61 });
62
63 const results = await analyzeTrends(stream, {
64 depth: options.depth ?? 3,
65 timeRange: options.range,
66 includeOutliers: false,
67 });
68
69 await db.insert('predictions', results);
70
71 return Response.json({
72 success: true,
73 predictions: results.length,
74 avgConfidence: mean(results.map(r => r.confidence)),
1 });
2}