diff --git a/fern/pages/04-lemur/customize-parameters.mdx b/fern/pages/04-lemur/customize-parameters.mdx
index c4b58a6..df9da0b 100644
--- a/fern/pages/04-lemur/customize-parameters.mdx
+++ b/fern/pages/04-lemur/customize-parameters.mdx
@@ -20,12 +20,13 @@ LeMUR features the following LLMs:
- Claude 3.5 Sonnet
- Claude 3 Opus
- Claude 3 Haiku
-- Claude 3 Sonnet
+- Claude 3 Sonnet
You can switch the model by specifying the `final_model` parameter.
+
-
+
```python {3}
result = transcript.lemur.task(
@@ -41,7 +42,26 @@ result = transcript.lemur.task(
| **Claude 3.0 Sonnet** | `aai.LemurModel.claude3_sonnet` | Claude 3 Sonnet is a legacy model with a balanced combination of performance and speed for efficient, high-throughput tasks. |
-
+
+
+```python {4}
+data = {
+ "prompt": prompt,
+ "transcript_ids": [transcript_id],
+ "final_model": "anthropic/claude-3-5-sonnet"
+}
+
+result = requests.post("https://api.assemblyai.com/lemur/v3/generate/task", headers=headers, json=data)
+```
+| Model | SDK Parameter | Description |
+| --- | --- | --- |
+| **Claude 3.5 Sonnet** | `'anthropic/claude-3-5-sonnet'` | Claude 3.5 Sonnet is the most intelligent model to date, outperforming Claude 3 Opus on a wide range of evaluations, with the speed and cost of Claude 3 Sonnet. This uses Anthropic's Claude 3.5 Sonnet model version `claude-3-5-sonnet-20240620`. |
+| **Claude 3.0 Opus** | `'anthropic/claude-3-opus'` | Claude 3 Opus is good at handling complex analysis, longer tasks with many steps, and higher-order math and coding tasks. |
+| **Claude 3.0 Haiku** | `'anthropic/claude-3-haiku'` | Claude 3 Haiku is the fastest model that can execute lightweight actions. |
+| **Claude 3.0 Sonnet** | `'anthropic/claude-3-sonnet'` | Claude 3 Sonnet is a legacy model with a balanced combination of performance and speed for efficient, high-throughput tasks. |
+
+
+
```ts {4}
const { response } = await client.lemur.task({
@@ -58,75 +78,91 @@ const { response } = await client.lemur.task({
| **Claude 3.0 Sonnet** | `'anthropic/claude-3-sonnet'` | Claude 3 Sonnet is a legacy model with a balanced combination of performance and speed for efficient, high-throughput tasks. |
-
+
-```go {4}
-var params aai.LeMURTaskParams
-params.Prompt = aai.String(prompt)
-params.TranscriptIDs = []string{aai.ToString(transcript.ID)}
-params.FinalModel = "anthropic/claude-3-5-sonnet"
-result, _ := client.LeMUR.Task(ctx, params)
-```
-| Model | SDK Parameter | Description |
-| --- | --- | --- |
-| **Claude 3.5 Sonnet** | `"anthropic/claude-3-5-sonnet"` | Claude 3.5 Sonnet is the most intelligent model to date, outperforming Claude 3 Opus on a wide range of evaluations, with the speed and cost of Claude 3 Sonnet. This uses Anthropic's Claude 3.5 Sonnet model version `claude-3-5-sonnet-20240620`. |
-| **Claude 3.0 Opus** | `"anthropic/claude-3-opus"` | Claude 3 Opus is good at handling complex analysis, longer tasks with many steps, and higher-order math and coding tasks. |
-| **Claude 3.0 Haiku** | `"anthropic/claude-3-haiku"` | Claude 3 Haiku is the fastest model that can execute lightweight actions. |
-| **Claude 3.0 Sonnet** | `"anthropic/claude-3-sonnet"` | Claude 3 Sonnet is a legacy model with a balanced combination of performance and speed for efficient, high-throughput tasks. |
-
-
-
+```ts {4}
+const data = {
+ prompt: prompt,
+ transcript_ids: [transcript_id],
+ final_model: 'anthropic/claude-3-5-sonnet'
+};
-```java {4}
-var params = LemurTaskParams.builder()
- .prompt(prompt)
- .transcriptIds(List.of(transcript.getId()))
- .finalModel(LemurModel.ANTHROPIC_CLAUDE3_5_SONNET)
- .build();
+const result = await axios.post("https://api.assemblyai.com/lemur/v3/generate/task", data, { headers });
```
-| Model | SDK Parameter | Description |
+| Model | SDK Parameter | Description |
| --- | --- | --- |
-| **Claude 3.5 Sonnet** | `LemurModel.ANTHROPIC_CLAUDE3_5_SONNET` | Claude 3.5 Sonnet is the most intelligent model to date, outperforming Claude 3 Opus on a wide range of evaluations, with the speed and cost of Claude 3 Sonnet. This uses Anthropic's Claude 3.5 Sonnet model version `claude-3-5-sonnet-20240620`. |
-| **Claude 3.0 Opus** | `LemurModel.ANTHROPIC_CLAUDE3_OPUS` | Claude 3 Opus is good at handling complex analysis, longer tasks with many steps, and higher-order math and coding tasks. |
-| **Claude 3.0 Haiku** | `LemurModel.ANTHROPIC_CLAUDE3_HAIKU` | Claude 3 Haiku is the fastest model that can execute lightweight actions. |
-| **Claude 3.0 Sonnet** | `LemurModel.ANTHROPIC_CLAUDE3_SONNET` | Claude 3 Sonnet is a legacy model with a balanced combination of performance and speed for efficient, high-throughput tasks. |
+| **Claude 3.5 Sonnet** | `'anthropic/claude-3-5-sonnet'` | Claude 3.5 Sonnet is the most intelligent model to date, outperforming Claude 3 Opus on a wide range of evaluations, with the speed and cost of Claude 3 Sonnet. This uses Anthropic's Claude 3.5 Sonnet model version `claude-3-5-sonnet-20240620`. |
+| **Claude 3.0 Opus** | `'anthropic/claude-3-opus'` | Claude 3 Opus is good at handling complex analysis, longer tasks with many steps, and higher-order math and coding tasks. |
+| **Claude 3.0 Haiku** | `'anthropic/claude-3-haiku'` | Claude 3 Haiku is the fastest model that can execute lightweight actions. |
+| **Claude 3.0 Sonnet** | `'anthropic/claude-3-sonnet'` | Claude 3 Sonnet is a legacy model with a balanced combination of performance and speed for efficient, high-throughput tasks. |
```csharp {5}
-var lemurTaskParams = new LemurTaskParams
+var data = new
{
- Prompt = prompt,
- TranscriptIds = [transcript.Id],
- FinalModel = LemurModel.AnthropicClaude3_5_Sonnet
+ transcript_ids = transcriptIds,
+ prompt = prompt,
+ final_model = "anthropic/claude-3-5-sonnet"
};
+
+var content = new StringContent(JsonSerializer.Serialize(data), Encoding.UTF8, "application/json");
+using var response = await httpClient.PostAsync("https://api.assemblyai.com/lemur/v3/generate/task", content);
```
-| Model | SDK Parameter | Description |
+| Model | SDK Parameter | Description |
| --- | --- | --- |
-| **Claude 3.5 Sonnet** | `LemurModel.AnthropicClaude3_5_Sonnet` | Claude 3.5 Sonnet is the most intelligent model to date, outperforming Claude 3 Opus on a wide range of evaluations, with the speed and cost of Claude 3 Sonnet. This uses Anthropic's Claude 3.5 Sonnet model version `claude-3-5-sonnet-20240620`. |
-| **Claude 3.0 Opus** | `LemurModel.AnthropicClaude3_Opus` | Claude 3 Opus is good at handling complex analysis, longer tasks with many steps, and higher-order math and coding tasks. |
-| **Claude 3.0 Haiku** | `LemurModel.AnthropicClaude3_Haiku` | Claude 3 Haiku is the fastest model that can execute lightweight actions. |
-| **Claude 3.0 Sonnet** | `LemurModel.AnthropicClaude3_Sonnet` | Claude 3 Sonnet is a legacy model with a balanced combination of performance and speed for efficient, high-throughput tasks. |
+| **Claude 3.5 Sonnet** | `"anthropic/claude-3-5-sonnet"` | Claude 3.5 Sonnet is the most intelligent model to date, outperforming Claude 3 Opus on a wide range of evaluations, with the speed and cost of Claude 3 Sonnet. This uses Anthropic's Claude 3.5 Sonnet model version `claude-3-5-sonnet-20240620`. |
+| **Claude 3.0 Opus** | `"anthropic/claude-3-opus"` | Claude 3 Opus is good at handling complex analysis, longer tasks with many steps, and higher-order math and coding tasks. |
+| **Claude 3.0 Haiku** | `"anthropic/claude-3-haiku"` | Claude 3 Haiku is the fastest model that can execute lightweight actions. |
+| **Claude 3.0 Sonnet** | `"anthropic/claude-3-sonnet"` | Claude 3 Sonnet is a legacy model with a balanced combination of performance and speed for efficient, high-throughput tasks. |
+
-```ruby {4}
-response = client.lemur.task(
+```ruby {3}
+request = Net::HTTP::Post.new("https://api.assemblyai.com/lemur/v3/generate/task", headers)
+request.body = {
+ final_model: "anthropic/claude-3-5-sonnet",
prompt: prompt,
- transcript_ids: [transcript_id],
- final_model: AssemblyAI::Lemur::LemurModel::ANTHROPIC_CLAUDE3_5_SONNET
-)
+ transcript_ids: [transcript_id]
+}.to_json
+
+response = http.request(request)
```
| Model | SDK Parameter | Description |
| --- | --- | --- |
-| **Claude 3.5 Sonnet** | `AssemblyAI::Lemur::LemurModel::ANTHROPIC_CLAUDE3_5_SONNET` | Claude 3.5 Sonnet is the most intelligent model to date, outperforming Claude 3 Opus on a wide range of evaluations, with the speed and cost of Claude 3 Sonnet. This uses Anthropic's Claude 3.5 Sonnet model version `claude-3-5-sonnet-20240620`. |
-| **Claude 3.0 Opus** | `AssemblyAI::Lemur::LemurModel::ANTHROPIC_CLAUDE3_OPUS` | Claude 3 Opus is good at handling complex analysis, longer tasks with many steps, and higher-order math and coding tasks. |
-| **Claude 3.0 Haiku** | `AssemblyAI::Lemur::LemurModel::ANTHROPIC_CLAUDE3_HAIKU` | Claude 3 Haiku is the fastest model that can execute lightweight actions. |
-| **Claude 3.0 Sonnet** | `AssemblyAI::Lemur::LemurModel::ANTHROPIC_CLAUDE3_SONNET` | Claude 3 Sonnet is a legacy model with a balanced combination of performance and speed for efficient, high-throughput tasks. |
+| **Claude 3.5 Sonnet** | `"anthropic/claude-3-5-sonnet"` | Claude 3.5 Sonnet is the most intelligent model to date, outperforming Claude 3 Opus on a wide range of evaluations, with the speed and cost of Claude 3 Sonnet. This uses Anthropic's Claude 3.5 Sonnet model version `claude-3-5-sonnet-20240620`. |
+| **Claude 3.0 Opus** | `"anthropic/claude-3-opus"` | Claude 3 Opus is good at handling complex analysis, longer tasks with many steps, and higher-order math and coding tasks. |
+| **Claude 3.0 Haiku** | `"anthropic/claude-3-haiku"` | Claude 3 Haiku is the fastest model that can execute lightweight actions. |
+| **Claude 3.0 Sonnet** | `"anthropic/claude-3-sonnet"` | Claude 3 Sonnet is a legacy model with a balanced combination of performance and speed for efficient, high-throughput tasks. |
+
+
+
+```php {7}
+$ch = curl_init("https://api.assemblyai.com/lemur/v3/generate/task");
+curl_setopt_array($ch, [
+ CURLOPT_RETURNTRANSFER => true,
+ CURLOPT_POST => true,
+ CURLOPT_HTTPHEADER => $headers,
+ CURLOPT_POSTFIELDS => json_encode([
+ 'final_model' => 'anthropic/claude-3-5-sonnet',
+ 'prompt' => $prompt,
+ 'transcript_ids' => [$transcript_id]
+ ])
+]);
+
+$response = curl_exec($ch);
+```
+| Model | SDK Parameter | Description |
+| --- | --- | --- |
+| **Claude 3.5 Sonnet** | `"anthropic/claude-3-5-sonnet"` | Claude 3.5 Sonnet is the most intelligent model to date, outperforming Claude 3 Opus on a wide range of evaluations, with the speed and cost of Claude 3 Sonnet. This uses Anthropic's Claude 3.5 Sonnet model version `claude-3-5-sonnet-20240620`. |
+| **Claude 3.0 Opus** | `"anthropic/claude-3-opus"` | Claude 3 Opus is good at handling complex analysis, longer tasks with many steps, and higher-order math and coding tasks. |
+| **Claude 3.0 Haiku** | `"anthropic/claude-3-haiku"` | Claude 3 Haiku is the fastest model that can execute lightweight actions. |
+| **Claude 3.0 Sonnet** | `"anthropic/claude-3-sonnet"` | Claude 3 Sonnet is a legacy model with a balanced combination of performance and speed for efficient, high-throughput tasks. |
@@ -138,90 +174,113 @@ You can find more information on pricing for each model
-
+
- You can change the maximum output size in tokens by specifying the `max_output_size` parameter. Up to 4000 tokens are allowed.
-
-
-```python {3}
+```python {4}
result = transcript.lemur.task(
prompt,
+ final_model,
max_output_size=1000
)
```
-
-
- You can change the maximum output size in tokens by specifying the `max_output_size` parameter. Up to 4000 tokens are allowed.
+
-```ts {4}
-const { response } = await client.lemur.task({
- transcript_ids: [transcript.id],
- prompt,
- max_output_size: 1000
-})
+```python {5}
+data = {
+ "prompt": prompt,
+ "transcript_ids": [transcript_id],
+ "final_model": final_model,
+ "max_output_size": 1000
+}
+
+result = requests.post("https://api.assemblyai.com/lemur/v3/generate/task", headers=headers, json=data)
```
-
-
- You can change the maximum output size in tokens by specifying the `MaxOutputSize` parameter. Up to 4000 tokens are allowed.
-
+
-```go {4}
-var params aai.LeMURTaskParams
-params.Prompt = aai.String(prompt)
-params.TranscriptIDs = []string{aai.ToString(transcript.ID)}
-params.MaxOutputSize = aai.Int64(2000)
-
-result, _ := client.LeMUR.Task(ctx, params)
+```ts {5}
+const { response } = await client.lemur.task({
+ prompt,
+ transcript_ids: [transcript.id],
+ final_model,
+ max_output_size: 1000
+})
```
-
+
- You can change the maximum output size in tokens by specifying the `maxOutputSize` parameter. Up to 4000 tokens are allowed.
+```ts {5}
+const data = {
+ transcript_ids: [transcript_id],
+ prompt: prompt,
+ final_model: final_model,
+ max_output_size: 1000
+};
-```java {4}
-var params = LemurTaskParams.builder()
- .prompt(prompt)
- .transcriptIds(List.of(transcript.getId()))
- .maxOutputSize(1000)
- .build();
+const result = await axios.post("https://api.assemblyai.com/lemur/v3/generate/task", data, { headers });
```
- You can change the maximum output size in tokens by specifying the `MaxOutputSize` parameter. Up to 4000 tokens are allowed.
-```csharp {5}
-var lemurTaskParams = new LemurTaskParams
+```csharp {6}
+var data = new
{
- Prompt = prompt,
- TranscriptIds = [transcript.Id],
- MaxOutputSize = 1000
+ transcript_ids = transcriptIds,
+ prompt = prompt,
+ final_model = final_model,
+ max_output_size = 1000
};
+
+var content = new StringContent(JsonSerializer.Serialize(data), Encoding.UTF8, "application/json");
+using var response = await httpClient.PostAsync("https://api.assemblyai.com/lemur/v3/generate/task", content);
```
- You can change the maximum output size in tokens by specifying the `max_output_size` parameter. Up to 4000 tokens are allowed.
-
-```ruby {4}
-response = client.lemur.task(
- prompt: prompt,
+```ruby {6}
+request = Net::HTTP::Post.new("https://api.assemblyai.com/lemur/v3/generate/task", headers)
+request.body = {
transcript_ids: [transcript_id],
+ prompt: prompt,
+ final_model: final_model,
max_output_size: 1000
-)
+}.to_json
+
+response = http.request(request)
+```
+
+
+
+
+```php {10}
+$ch = curl_init("https://api.assemblyai.com/lemur/v3/generate/task");
+curl_setopt_array($ch, [
+ CURLOPT_RETURNTRANSFER => true,
+ CURLOPT_POST => true,
+ CURLOPT_HTTPHEADER => $headers,
+ CURLOPT_POSTFIELDS => json_encode([
+    'transcript_ids' => [$transcript_id],
+ 'prompt' => $prompt,
+ 'final_model' => $final_model,
+ 'max_output_size' => 1000
+ ])
+]);
+
+$response = curl_exec($ch);
```
@@ -238,70 +297,109 @@ You can change the temperature by specifying the `temperature` parameter, rangin
Higher values result in answers that are more creative, lower values are more conservative.
-
+
-```python {3}
+```python {4}
result = transcript.lemur.task(
prompt,
+ final_model,
temperature=0.7
)
```
-
-```ts {4}
-const { response } = await client.lemur.task({
- transcript_ids: [transcript.id],
- prompt,
- temperature: 0.7
-})
+
+
+```python {5}
+data = {
+ "prompt": prompt,
+ "transcript_ids": [transcript_id],
+ "final_model": final_model,
+ "temperature": 0.7
+}
+
+result = requests.post("https://api.assemblyai.com/lemur/v3/generate/task", headers=headers, json=data)
```
-
-```go {4}
-var params aai.LeMURTaskParams
-params.Prompt = aai.String(prompt)
-params.TranscriptIDs = []string{aai.ToString(transcript.ID)}
-params.Temperature = aai.Float64(0.7)
+
+
+```ts {5}
+const { response } = await client.lemur.task({
+ prompt,
+ transcript_ids: [transcript.id],
+ final_model,
+ temperature: 0.7
+})
-result, _ := client.LeMUR.Task(ctx, params)
```
-
+
-```java {4}
-var params = LemurTaskParams.builder()
- .prompt(prompt)
- .transcriptIds(List.of(transcript.getId()))
- .temperature(0.7)
- .build();
+```ts {5}
+const data = {
+ transcript_ids: [transcript_id],
+ prompt: prompt,
+ final_model: final_model,
+ temperature: 0.7
+};
+
+const result = await axios.post("https://api.assemblyai.com/lemur/v3/generate/task", data, { headers });
```
-```csharp {5}
-var lemurTaskParams = new LemurTaskParams
+```csharp {6}
+var data = new
{
- Prompt = prompt,
- TranscriptIds = [transcript.Id],
- Temperature = 0.7f
+ transcript_ids = transcriptIds,
+ prompt,
+ final_model = final_model,
+ temperature = 0.7
};
+
+var content = new StringContent(JsonSerializer.Serialize(data), Encoding.UTF8, "application/json");
+using var response = await httpClient.PostAsync("https://api.assemblyai.com/lemur/v3/generate/task", content);
```
-```ruby {4}
-response = client.lemur.task(
- prompt: prompt,
+```ruby {6}
+request = Net::HTTP::Post.new("https://api.assemblyai.com/lemur/v3/generate/task", headers)
+request.body = {
transcript_ids: [transcript_id],
+ prompt: prompt,
+ final_model: final_model,
temperature: 0.7
-)
+}.to_json
+
+response = http.request(request)
+
+```
+
+
+
+
+```php {10}
+$ch = curl_init("https://api.assemblyai.com/lemur/v3/generate/task");
+curl_setopt_array($ch, [
+ CURLOPT_RETURNTRANSFER => true,
+ CURLOPT_POST => true,
+ CURLOPT_HTTPHEADER => $headers,
+ CURLOPT_POSTFIELDS => json_encode([
+    'transcript_ids' => [$transcript_id],
+ 'prompt' => $prompt,
+ 'final_model' => $final_model,
+ 'temperature' => 0.7
+ ])
+]);
+
+$response = curl_exec($ch);
```
@@ -315,158 +413,140 @@ response = client.lemur.task(
You can submit custom text inputs to LeMUR without transcript IDs. This allows you to customize the input, for example, you could include the speaker labels for the LLM.
+To submit custom text input, use the `input_text` parameter
-
-
- To submit custom text input, use the `input_text` parameter on `aai.Lemur().task()`.
-
-
-```python {12}
-config = aai.TranscriptionConfig(
- speaker_labels=True,
-)
-transcript = transcriber.transcribe(audio_url, config=config)
+
+```python {8}
text_with_speaker_labels = ""
for utt in transcript.utterances:
text_with_speaker_labels += f"Speaker {utt.speaker}:\n{utt.text}\n"
result = aai.Lemur().task(
prompt,
+ final_model,
input_text=text_with_speaker_labels
)
```
-
-
- To submit custom text input, use the `input_text` parameter instead of `transcript_ids`.
+
-```ts {14}
-const params = {
- audio: audioUrl,
- speaker_labels: true
-}
-const transcript = await client.transcripts.transcribe(params)
+```python {8}
+text_with_speaker_labels = ""
+for utt in transcript["utterances"]:
+  text_with_speaker_labels += f"Speaker {utt['speaker']}:\n{utt['text']}\n"
-const textWithSpeakerLabels = ''
-for (let utterance of transcript.utterances!) {
- textWithSpeakerLabels += `Speaker ${utterance.speaker}:\n${utterance.text}\n`
+data = {
+ "prompt": prompt,
+ "final_model": final_model,
+ "input_text": text_with_speaker_labels
}
-const { response } = await client.lemur.task({
- prompt: prompt,
- input_text: textWithSpeakerLabels
-})
+result = requests.post("https://api.assemblyai.com/lemur/v3/generate/task", headers=headers, json=data)
```
-
-
- To submit custom text input, use the `InputText` parameter instead of `TranscriptIDs`.
+
-```go {16}
-transcript, _ := client.Transcripts.TranscribeFromURL(ctx, audioURL, &aai.TranscriptOptionalParams{
- SpeakerLabels: aai.Bool(true),
-})
-
-var textWithSpeakerLabels string
-
-for _, utterance := range transcript.Utterances {
- textWithSpeakerLabels += fmt.Sprintf("Speaker %s:\n%s\n",
- aai.ToString(utterance.Speaker),
- aai.ToString(utterance.Text),
- )
+```ts {9}
+let textWithSpeakerLabels = '';
+for (const utt of transcript.utterances) {
+ textWithSpeakerLabels += `Speaker ${utt.speaker}:\n${utt.text}\n`;
}
-var params aai.LeMURTaskParams
-params.Prompt = aai.String(prompt)
-params.InputText = aai.String(textWithSpeakerLabels)
-
-result, _ := client.LeMUR.Task(ctx, params)
+const { response } = await client.lemur.task({
+ prompt: prompt,
+ final_model: final_model,
+ input_text: textWithSpeakerLabels
+});
```
-
-
- To submit custom text input, use the `.inputText()` method instead of `.transcriptIds()`.
-
-
-```java {15}
-var params = TranscriptOptionalParams.builder()
- .speakerLabels(true)
- .build();
+
-Transcript transcript = client.transcripts().transcribe(audioUrl, params);
+```ts {9}
+let textWithSpeakerLabels = '';
+for (const utt of transcript.utterances) {
+ textWithSpeakerLabels += `Speaker ${utt.speaker}:\n${utt.text}\n`;
+}
-String textWithSpeakerLabels = transcript.getUtterances()
- .map(utterances -> utterances.stream()
- .map(utterance -> "Speaker " + utterance.getSpeaker() + ":\n" + utterance.getText() + "\n")
- .collect(Collectors.joining()))
- .orElse("");
+const data = {
+ prompt: prompt,
+ final_model: final_model,
+ input_text: textWithSpeakerLabels
+};
-var response = client.lemur().task(LemurTaskParams.builder()
- .prompt(prompt)
- .inputText(textWithSpeakerLabels)
- .build());
+const result = await axios.post("https://api.assemblyai.com/lemur/v3/generate/task", data, { headers });
```
-
- To submit custom text input, use the `InputText` parameter instead of `TranscriptIds`.
-
+
-```csharp {15}
-var transcript = await client.Transcripts.TranscribeAsync(new TranscriptParams
+```csharp {11}
+string textWithSpeakerLabels = "";
+foreach (var utt in transcript.utterances)
{
- AudioUrl = "https://assembly.ai/sports_injuries.mp3",
- SpeakerLabels = true
-});
-
-var textWithSpeakerLabels = string.Join(
- "",
- transcript.Utterances!.Select(utterance => $"Speaker {utterance.Speaker}:\n{utterance.Text}\n")
-);
+ textWithSpeakerLabels += $"Speaker {utt.speaker}:\n{utt.text}\n";
+}
-var lemurTaskParams = new LemurTaskParams
+var data = new
{
- Prompt = prompt,
- InputText = textWithSpeakerLabels
+ prompt,
+ final_model = final_model,
+ input_text = textWithSpeakerLabels
};
-var response = await client.Lemur.TaskAsync(lemurTaskParams);
+var content = new StringContent(JsonSerializer.Serialize(data), Encoding.UTF8, "application/json");
+using var response = await httpClient.PostAsync("https://api.assemblyai.com/lemur/v3/generate/task", content);
```
-
- To submit custom text input, use the `input_text` parameter instead of `transcript_ids`.
-
-```ruby {16}
-transcript = client.transcripts.transcribe(
- audio_url: audio_url,
- speaker_labels: true
-)
-
-text_with_speaker_labels = (transcript.utterances.map do |utterance|
- sprintf(
- "Speaker %s:\n%s\n",
- speaker: utterance.speaker,
- text: utterance.text
- )
-end).join("\n")
+```ruby {10}
+text_with_speaker_labels = ""
+transcript["utterances"].each do |utt|
+  text_with_speaker_labels += "Speaker #{utt["speaker"]}:\n#{utt["text"]}\n"
+end
-response = client.lemur.task(
+request = Net::HTTP::Post.new("https://api.assemblyai.com/lemur/v3/generate/task", headers)
+request.body = {
prompt: prompt,
+ final_model: final_model,
input_text: text_with_speaker_labels
-)
+}.to_json
+
+response = http.request(request)
+```
+
+
-puts response.response
+
+
+```php {14}
+$text_with_speaker_labels = "";
+foreach ($transcript['utterances'] as $utt) {
+ $text_with_speaker_labels .= "Speaker {$utt['speaker']}:\n{$utt['text']}\n";
+}
+
+$ch = curl_init("https://api.assemblyai.com/lemur/v3/generate/task");
+curl_setopt_array($ch, [
+ CURLOPT_RETURNTRANSFER => true,
+ CURLOPT_POST => true,
+ CURLOPT_HTTPHEADER => $headers,
+ CURLOPT_POSTFIELDS => json_encode([
+ 'prompt' => $prompt,
+ 'final_model' => $final_model,
+ 'input_text' => $text_with_speaker_labels
+ ])
+]);
+
+$response = curl_exec($ch);
```
@@ -482,7 +562,7 @@ LeMUR can easily ingest multiple transcripts in a single API call.
You can feed in up to a maximum of 100 files or 100 hours, whichever is lower.
-
+
```python {1-7}
transcript_group = transcriber.transcribe_group(
@@ -502,7 +582,21 @@ result = transcript_group.lemur.task(
```
-
+
+
+
+```python {3}
+data = {
+ "prompt": prompt,
+ "transcript_ids": [id1, id2, id3],
+ "final_model": final_model,
+}
+
+result = requests.post("https://api.assemblyai.com/lemur/v3/generate/task", headers=headers, json=data)
+```
+
+
+
```ts {2}
const { response } = await client.lemur.task({
@@ -512,45 +606,66 @@ const { response } = await client.lemur.task({
```
-
-
-```go {2}
-var params aai.LeMURTaskParams
-params.TranscriptIDs = []string{id1, id2, id3}
-params.Prompt = aai.String("Provide a summary of these customer calls.")
+
-result, _ := client.LeMUR.Task(ctx, params)
-```
+```ts {2}
+const data = {
+ transcript_ids: [id1, id2, id3],
+ prompt: prompt,
+ final_model: final_model,
+};
-
-
+const result = await axios.post("https://api.assemblyai.com/lemur/v3/generate/task", data, { headers });
-```java {3}
-var response = client.lemur().task(LemurTaskParams.builder()
- .prompt(prompt)
- .transcriptIds(List.of(id1, id2, id3))
- .build());
```
+
-```csharp {4}
-var lemurTaskParams = new LemurTaskParams
+```csharp {3}
+var data = new
{
- Prompt = prompt,
- TranscriptIds = [id1, id2, id3]
+    transcript_ids = new List<string> { id1, id2, id3 },
+ prompt = prompt,
+ final_model = final_model,
};
+
+var content = new StringContent(JsonSerializer.Serialize(data), Encoding.UTF8, "application/json");
+using var response = await httpClient.PostAsync("https://api.assemblyai.com/lemur/v3/generate/task", content);
```
```ruby {3}
-response = client.lemur.task(
+request = Net::HTTP::Post.new("https://api.assemblyai.com/lemur/v3/generate/task", headers)
+request.body = {
+ transcript_ids: [id1, id2, id3],
prompt: prompt,
- transcript_ids: [id1, id2, id3]
-)
+ final_model: final_model
+}.to_json
+
+response = http.request(request)
+```
+
+
+
+
+```php {7}
+$ch = curl_init("https://api.assemblyai.com/lemur/v3/generate/task");
+curl_setopt_array($ch, [
+ CURLOPT_RETURNTRANSFER => true,
+ CURLOPT_POST => true,
+ CURLOPT_HTTPHEADER => $headers,
+ CURLOPT_POSTFIELDS => json_encode([
+ 'transcript_ids' => [$id1, $id2, $id3],
+ 'prompt' => $prompt,
+ 'final_model' => $final_model
+ ])
+]);
+
+$response = curl_exec($ch);
```
@@ -567,7 +682,7 @@ You can delete the data for a previously submitted LeMUR request.
Response data from the LLM, as well as any context provided in the original request will be removed.
-
+
```python {3}
result = transcript.lemur.task(prompt)
@@ -576,7 +691,18 @@ deletion_response = aai.Lemur.purge_request_data(result.request_id)
```
-
+
+
+```python {5}
+# First get the request_id from a previous LeMUR task response
+request_id = result.json()["request_id"]
+
+delete_url = f"https://api.assemblyai.com/lemur/v3/{request_id}"
+deletion_response = requests.delete(delete_url, headers=headers)
+```
+
+
+
```ts {6}
const { response, request_id } = await client.lemur.task({
@@ -588,36 +714,56 @@ const deletionResponse = await client.lemur.purgeRequestData(request_id)
```
-
+
-```java {6}
-var response = client.lemur().task(LemurTaskParams.builder()
- .prompt(prompt)
- .transcriptIds(List.of(transcript.getId()))
- .build());
+```ts {5}
+// First get the request_id from a previous LeMUR task response
+const request_id = result.data.request_id;
-var deletionResponse = client.lemur().purgeRequestData(response.getRequestId());
+const delete_url = `https://api.assemblyai.com/lemur/v3/${request_id}`;
+const deletion_response = await axios.delete(delete_url, { headers });
```
-```csharp {3}
-var response = await client.Lemur.TaskAsync(lemurTaskParams);
+```csharp {5}
+// First get the request_id from a previous LeMUR task response
+string request_id = lemurResponse.RequestId;
-var deletionResponse = await client.Lemur.PurgeRequestDataAsync(response.RequestId);
+string delete_url = $"https://api.assemblyai.com/lemur/v3/{request_id}";
+using var deletion_response = await httpClient.DeleteAsync(delete_url);
```
```ruby {6}
-response = client.lemur.task(
- prompt: prompt,
- transcript_ids: [transcript_id],
-)
+# First get the request_id from a previous LeMUR task response
+request_id = lemur_result["request_id"]
+
+delete_uri = URI("#{base_url}/lemur/v3/#{request_id}")
+delete_request = Net::HTTP::Delete.new(delete_uri, headers)
+deletion_response = http.request(delete_request)
+```
+
+
+
+
+```php {12}
+// First get the request_id from a previous LeMUR task response
+$request_id = $result['request_id'];
+
+$delete_url = "https://api.assemblyai.com/lemur/v3/{$request_id}";
+$ch = curl_init($delete_url);
+curl_setopt_array($ch, [
+ CURLOPT_RETURNTRANSFER => true,
+ CURLOPT_CUSTOMREQUEST => "DELETE",
+ CURLOPT_HTTPHEADER => $headers
+]);
-deletion_response = client.lemur.purge_request_data(request_id: response.request_id)
+$deletion_response = curl_exec($ch);
+curl_close($ch);
```