Merged
103 changes: 29 additions & 74 deletions examples/nodejs/workflows/workflows.ts
@@ -1,85 +1,40 @@
// Test script for the simplified proxy approach
// Experimental upcoming beta AI primitive.
// Please refer to the documentation for more information: https://langbase.com/docs

import 'dotenv/config';
import {Langbase} from 'langbase';
import {Langbase, Workflow} from 'langbase';

// Create Langbase instance
const langbase = new Langbase({
apiKey: process.env.LANGBASE_API_KEY!,
});

async function main() {
// Create a workflow with debug mode enabled
const workflow = langbase.workflow({
name: 'simplified-proxy-test',
debug: true, // Enable debug logging
const {step} = new Workflow({debug: true});

const result = await step({
id: 'summarize',
run: async () => {
return langbase.llm.run({
model: 'openai:gpt-4o-mini',
apiKey: process.env.OPENAI_API_KEY!,
messages: [
{
role: 'system',
content:
'You are an expert summarizer. Summarize the user input.',
},
{
role: 'user',
content:
'I am testing workflows. I just created an example of summarize workflow. Can you summarize this?',
},
],
stream: false,
});
},
});

try {
// STEP 1: Call langbase.agent.run but don't return its result directly
const step1Result = await workflow.step({
id: 'call-but-return-custom',
run: async () => {
// Return custom result instead
return {
customField: 'Custom result from simplified proxy',
timestamp: new Date().toISOString(),
};
},
});

// STEP 2: Return agent.run result directly
const step2Result = await workflow.step({
id: 'return-agent-run-directly',
run: async () => {
// Call Langbase API and return the result directly
return langbase.agent.run({
model: 'openai:gpt-4o-mini',
apiKey: process.env.OPENAI_API_KEY!,
instructions: 'Be brief and concise.',
input: 'What is 2+2?',
stream: false,
});
},
});

// STEP 3: Make multiple Langbase calls in one step
const step3Result = await workflow.step({
id: 'multiple-calls',
run: async () => {
// First call
const call1 = await langbase.agent.run({
model: 'openai:gpt-4o-mini',
apiKey: process.env.OPENAI_API_KEY!,
instructions: 'Be brief.',
input: 'First proxy test',
stream: false,
});

// Second call with different method
const call2 = await langbase.agent.run({
model: 'openai:gpt-4o-mini',
apiKey: process.env.OPENAI_API_KEY!,
instructions: 'Be brief.',
input: 'Second proxy test',
stream: false,
});

// Return combined result
return {
summary: 'Multiple calls completed with simplified proxy',
calls: 2,
firstOutput: call1.output,
secondOutput: call2.output,
};
},
});
} catch (error) {
console.error('❌ Workflow error:', error);
} finally {
// End the workflow to show trace report
workflow.end();
}
console.log(result['completion']);
}

// Run the test
main().catch(console.error);
main();
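
For context on the new API surface, here is a minimal sketch of chaining two steps with the `Workflow` class as used above. It assumes steps run sequentially and that a later step can reference an earlier step's resolved value; anything beyond what the example shows (such as additional `Workflow` options) is not confirmed by this diff.

// Hypothetical two-step sketch based on the updated example above.
// LANGBASE_API_KEY and OPENAI_API_KEY are assumed to be set, as in the example.
import 'dotenv/config';
import {Langbase, Workflow} from 'langbase';

const langbase = new Langbase({apiKey: process.env.LANGBASE_API_KEY!});

async function run() {
	const {step} = new Workflow({debug: true});

	// Step 1: summarize some input.
	const summary = await step({
		id: 'summarize',
		run: async () =>
			langbase.llm.run({
				model: 'openai:gpt-4o-mini',
				apiKey: process.env.OPENAI_API_KEY!,
				messages: [
					{role: 'system', content: 'Summarize the user input.'},
					{role: 'user', content: 'Workflows are now created with the Workflow class directly.'},
				],
				stream: false,
			}),
	});

	// Step 2: feed the first step's completion into a follow-up call.
	const title = await step({
		id: 'title',
		run: async () =>
			langbase.llm.run({
				model: 'openai:gpt-4o-mini',
				apiKey: process.env.OPENAI_API_KEY!,
				messages: [
					{role: 'user', content: `Write a short title for: ${summary['completion']}`},
				],
				stream: false,
			}),
	});

	console.log(title['completion']);
}

run().catch(console.error);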
11 changes: 0 additions & 11 deletions packages/langbase/src/common/request.ts
@@ -62,17 +62,6 @@ export class Request {
const isLllmGenerationEndpoint =
GENERATION_ENDPOINTS.includes(endpoint);

// All endpoints should return headers if rawResponse is true
if (!isLllmGenerationEndpoint && options.body?.rawResponse) {
const responseData = await response.json();
return {
...responseData,
rawResponse: {
headers: Object.fromEntries(response.headers.entries()),
},
} as T;
}

if (isLllmGenerationEndpoint) {
const threadId = response.headers.get('lb-thread-id');

26 changes: 0 additions & 26 deletions packages/langbase/src/langbase/langbase.ts
@@ -1,6 +1,5 @@
import {convertDocToFormData} from '@/lib/utils/doc-to-formdata';
import {Request} from '../common/request';
import {Workflow} from './workflows';

export type Role = 'user' | 'assistant' | 'system' | 'tool';

@@ -640,12 +639,6 @@ export class Langbase {
};
};

public workflow: (config: {debug?: boolean; name: string}) => Workflow;

public traces: {
create: (trace: any) => Promise<any>;
};

constructor(options?: LangbaseOptions) {
this.baseUrl = options?.baseUrl ?? 'https://api.langbase.com';
this.apiKey = options?.apiKey ?? '';
@@ -730,12 +723,6 @@ export class Langbase {
this.agent = {
run: this.runAgent.bind(this),
};

this.workflow = config => new Workflow({...config, langbase: this});

this.traces = {
create: this.createTrace.bind(this),
};
}

private async runPipe(
@@ -1159,17 +1146,4 @@ export class Langbase {
},
});
}

/**
* Creates a new trace on Langbase.
*
* @param {any} trace - The trace data to send.
* @returns {Promise<any>} A promise that resolves to the response of the trace creation.
*/
private async createTrace(trace: any): Promise<any> {
return this.request.post({
endpoint: '/v1/traces',
body: trace,
});
}
}
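
Since the `workflow` factory and `traces.create` are removed from the `Langbase` class here, the migration path appears to be constructing `Workflow` directly from the package export, as the updated example does. Below is a sketch under that assumption; the old calls are shown commented out for comparison, and `name` is omitted because it no longer appears as a `Workflow` option in this diff.

// Before this PR (removed API, shown for comparison):
// const workflow = langbase.workflow({name: 'my-workflow', debug: true});
// const result = await workflow.step({id: 'step-1', run: async () => ({ok: true})});
// workflow.end();

// After this PR (assumed replacement, mirroring the updated example):
import {Workflow} from 'langbase';

async function migrated() {
	const {step} = new Workflow({debug: true});
	const result = await step({id: 'step-1', run: async () => ({ok: true})});
	return result;
}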
133 changes: 0 additions & 133 deletions packages/langbase/src/langbase/trace.ts

This file was deleted.
