
Here's server.js:

const express = require("express");
const cors = require("cors");
const axios = require("axios");
const app = express();

app.use(cors());
app.use(express.json());

app.post("/", async (req, res) => {
  try {
    const apiKey = "sk-xxxxxxxxxxxxxxxxxxxxx";
    const prompt = req.body.prompt; // You would need to send the prompt from your Flutter app

    const response = await axios.post(
      "https://api.openai.com/v1/chat/completions",
      {
        model: "gpt-3.5-turbo",
        temperature: 1,
        messages: prompt,
      },
      {
        headers: {
          Authorization: `Bearer ${apiKey}`,
        },
      }
    );

    res.json(response.data);
  } catch (error) {
    console.error("Error:", error);
  }
});

app.listen(process.env.PORT || 5000, function () {
  console.log(
    "Express server listening on port %d in %s mode",
    this.address().port,
    app.settings.env
  );
});

Here's the sendPrompt() function in Flutter:

Future<String> sendPrompt(prompt) async {
  const url =
      'https://xxxxxxx-xxxxxxxx.herokuapp.com/'; // Replace with your actual URL
  final response = await http.post(
    Uri.parse(url),
    headers: <String, String>{
      'Content-Type': 'application/json',
    },
    body: '{"prompt": "$prompt"}',
  );

  if (response.statusCode == 200) {
    final data = jsonDecode(response.body);
    final text = data['choices'][0]['message']['content'].toString();
    return text;
  } else {
    throw Exception('Failed to send prompt ${response.statusCode}');
  }
}

Here's the prompt:

[{role: system, content: You are a helpful AI Assistant.}, {role: user, content: hi}]

Here's what running heroku logs --tail in the terminal returned:

2023-08-14T08:25:23.412773+00:00 app[web.1]: 'openai-organization': 'user-xxxxxxxx',
2023-08-14T08:25:23.412773+00:00 app[web.1]: 'openai-processing-ms': '4',
2023-08-14T08:25:23.412774+00:00 app[web.1]: 'openai-version': '2020-10-01',
2023-08-14T08:25:23.412774+00:00 app[web.1]: 'strict-transport-security': 'max-age=15724800; includeSubDomains',
2023-08-14T08:25:23.412774+00:00 app[web.1]: 'x-ratelimit-limit-requests': '3500',
2023-08-14T08:25:23.412774+00:00 app[web.1]: 'x-ratelimit-remaining-requests': '3499',
2023-08-14T08:25:23.412775+00:00 app[web.1]: 'x-ratelimit-reset-requests': '17ms',
2023-08-14T08:25:23.412775+00:00 app[web.1]: 'x-request-id': 'xxxxxxxxx',
2023-08-14T08:25:23.412775+00:00 app[web.1]: 'cf-cache-status': 'DYNAMIC',
2023-08-14T08:25:23.412775+00:00 app[web.1]: server: 'cloudflare',
2023-08-14T08:25:23.412775+00:00 app[web.1]: 'cf-ray': '7f67ce90bede2f24-IAD',
2023-08-14T08:25:23.412776+00:00 app[web.1]: 'alt-svc': 'h3=":443"; ma=86400'
2023-08-14T08:25:23.412776+00:00 app[web.1]: },
2023-08-14T08:25:23.412776+00:00 app[web.1]: config: {
2023-08-14T08:25:23.412778+00:00 app[web.1]: transitional: [Object],
2023-08-14T08:25:23.412778+00:00 app[web.1]: adapter: [Array],
2023-08-14T08:25:23.412778+00:00 app[web.1]: transformRequest: [Array],
2023-08-14T08:25:23.412779+00:00 app[web.1]: transformResponse: [Array],
2023-08-14T08:25:23.412779+00:00 app[web.1]: timeout: 0,
2023-08-14T08:25:23.412779+00:00 app[web.1]: xsrfCookieName: 'XSRF-TOKEN',
2023-08-14T08:25:23.412779+00:00 app[web.1]: xsrfHeaderName: 'X-XSRF-TOKEN',
2023-08-14T08:25:23.412780+00:00 app[web.1]: maxContentLength: -1,
2023-08-14T08:25:23.412780+00:00 app[web.1]: maxBodyLength: -1,
2023-08-14T08:25:23.412780+00:00 app[web.1]: env: [Object],
2023-08-14T08:25:23.412780+00:00 app[web.1]: validateStatus: [Function: validateStatus],
2023-08-14T08:25:23.412780+00:00 app[web.1]: headers: [AxiosHeaders],
2023-08-14T08:25:23.412781+00:00 app[web.1]: method: 'post',
2023-08-14T08:25:23.412781+00:00 app[web.1]: url: 'https://api.openai.com/v1/chat/completions',
2023-08-14T08:25:23.412781+00:00 app[web.1]: data: `{"model":"gpt-3.5-turbo","temperature":1,"messages":"[{role: system, content: You are a helpful AI Assistant.}, {role: user, content: hi}]"}`
2023-08-14T08:25:23.412781+00:00 app[web.1]: },
2023-08-14T08:25:23.412782+00:00 app[web.1]: request: <ref *1> ClientRequest {
2023-08-14T08:25:23.412782+00:00 app[web.1]: _events: [Object: null prototype],
2023-08-14T08:25:23.412782+00:00 app[web.1]: _eventsCount: 7,
2023-08-14T08:25:23.412782+00:00 app[web.1]: _maxListeners: undefined,
2023-08-14T08:25:23.412782+00:00 app[web.1]: outputData: [],
2023-08-14T08:25:23.412783+00:00 app[web.1]: outputSize: 0,
2023-08-14T08:25:23.412783+00:00 app[web.1]: writable: true,
2023-08-14T08:25:23.412783+00:00 app[web.1]: destroyed: false,
2023-08-14T08:25:23.412783+00:00 app[web.1]: _last: true,
2023-08-14T08:25:23.412783+00:00 app[web.1]: chunkedEncoding: false,
2023-08-14T08:25:23.412783+00:00 app[web.1]: shouldKeepAlive: false,
2023-08-14T08:25:23.412784+00:00 app[web.1]: maxRequestsOnConnectionReached: false,
2023-08-14T08:25:23.412784+00:00 app[web.1]: _defaultKeepAlive: true,
2023-08-14T08:25:23.412784+00:00 app[web.1]: useChunkedEncodingByDefault: true,
2023-08-14T08:25:23.412784+00:00 app[web.1]: sendDate: false,
2023-08-14T08:25:23.412784+00:00 app[web.1]: _removedConnection: false,
2023-08-14T08:25:23.412785+00:00 app[web.1]: _removedContLen: false,
2023-08-14T08:25:23.412785+00:00 app[web.1]: _removedTE: false,
2023-08-14T08:25:23.412785+00:00 app[web.1]: strictContentLength: false,
2023-08-14T08:25:23.412785+00:00 app[web.1]: _contentLength: '730',
2023-08-14T08:25:23.412785+00:00 app[web.1]: _hasBody: true,
2023-08-14T08:25:23.412786+00:00 app[web.1]: _trailer: '',
2023-08-14T08:25:23.412786+00:00 app[web.1]: finished: true,
2023-08-14T08:25:23.412786+00:00 app[web.1]: _headerSent: true,
2023-08-14T08:25:23.412786+00:00 app[web.1]: _closed: false,
2023-08-14T08:25:23.412786+00:00 app[web.1]: socket: [TLSSocket],
2023-08-14T08:25:23.412787+00:00 app[web.1]: _header: 'POST /v1/chat/completions HTTP/1.1\r\n' +
2023-08-14T08:25:23.412787+00:00 app[web.1]: 'Accept: application/json, text/plain, */*\r\n' +
2023-08-14T08:25:23.412787+00:00 app[web.1]: 'Content-Type: application/json\r\n' +
2023-08-14T08:25:23.412787+00:00 app[web.1]: 'Authorization: Bearer sk-xxxxxxxxxx\r\n' +
2023-08-14T08:25:23.412788+00:00 app[web.1]: 'User-Agent: axios/1.4.0\r\n' +
2023-08-14T08:25:23.412788+00:00 app[web.1]: 'Content-Length: 730\r\n' +
2023-08-14T08:25:23.412788+00:00 app[web.1]: 'Accept-Encoding: gzip, compress, deflate, br\r\n' +
2023-08-14T08:25:23.412788+00:00 app[web.1]: 'Host: api.openai.com\r\n' +
2023-08-14T08:25:23.412788+00:00 app[web.1]: 'Connection: close\r\n' +
2023-08-14T08:25:23.412788+00:00 app[web.1]: '\r\n',
2023-08-14T08:25:23.412789+00:00 app[web.1]: _keepAliveTimeout: 0,
2023-08-14T08:25:23.412789+00:00 app[web.1]: _onPendingData: [Function: nop],
2023-08-14T08:25:23.412789+00:00 app[web.1]: agent: [Agent],
2023-08-14T08:25:23.412789+00:00 app[web.1]: socketPath: undefined,
2023-08-14T08:25:23.412789+00:00 app[web.1]: method: 'POST',
2023-08-14T08:25:23.412790+00:00 app[web.1]: maxHeaderSize: undefined,
2023-08-14T08:25:23.412790+00:00 app[web.1]: insecureHTTPParser: undefined,
2023-08-14T08:25:23.412790+00:00 app[web.1]: joinDuplicateHeaders: undefined,
2023-08-14T08:25:23.412790+00:00 app[web.1]: path: '/v1/chat/completions',
2023-08-14T08:25:23.412790+00:00 app[web.1]: _ended: true,
2023-08-14T08:25:23.412791+00:00 app[web.1]: res: [IncomingMessage],
2023-08-14T08:25:23.412791+00:00 app[web.1]: aborted: false,
2023-08-14T08:25:23.412791+00:00 app[web.1]: timeoutCb: null,
2023-08-14T08:25:23.412791+00:00 app[web.1]: upgradeOrConnect: false,
2023-08-14T08:25:23.412791+00:00 app[web.1]: parser: null,
2023-08-14T08:25:23.412791+00:00 app[web.1]: maxHeadersCount: null,
2023-08-14T08:25:23.412792+00:00 app[web.1]: reusedSocket: false,
2023-08-14T08:25:23.412792+00:00 app[web.1]: host: 'api.openai.com',
2023-08-14T08:25:23.412792+00:00 app[web.1]: protocol: 'https:',
2023-08-14T08:25:23.412792+00:00 app[web.1]: _redirectable: [Writable],
2023-08-14T08:25:23.412792+00:00 app[web.1]: [Symbol(kCapture)]: false,
2023-08-14T08:25:23.412792+00:00 app[web.1]: [Symbol(kBytesWritten)]: 0,
2023-08-14T08:25:23.412793+00:00 app[web.1]: [Symbol(kNeedDrain)]: false,
2023-08-14T08:25:23.412793+00:00 app[web.1]: [Symbol(corked)]: 0,
2023-08-14T08:25:23.412793+00:00 app[web.1]: [Symbol(kOutHeaders)]: [Object: null prototype],
2023-08-14T08:25:23.412793+00:00 app[web.1]: [Symbol(errored)]: null,
2023-08-14T08:25:23.412793+00:00 app[web.1]: [Symbol(kHighWaterMark)]: 16384,
2023-08-14T08:25:23.412795+00:00 app[web.1]: [Symbol(kRejectNonStandardBodyWrites)]: false,
2023-08-14T08:25:23.412795+00:00 app[web.1]: [Symbol(kUniqueHeaders)]: null
2023-08-14T08:25:23.412796+00:00 app[web.1]: },
2023-08-14T08:25:23.412796+00:00 app[web.1]: data: { error: [Object] }
2023-08-14T08:25:23.412796+00:00 app[web.1]: }
2023-08-14T08:25:23.412796+00:00 app[web.1]: }
2023-08-14T08:25:53.261588+00:00 heroku[router]: at=error code=H12 desc="Request timeout" method=POST path="/" host=xxxxx-xxxxxxx.herokuapp.com request_id=89b32d06-75b6-4579-af4f-77ea27e42e18 fwd="152.58.97.183" dyno=web.1 connect=0ms service=30000ms status=503 bytes=0 protocol=https

Maybe the request syntax is wrong. What could be causing this error?

Hammad Nasir

3 Answers


You may have missed the Content-Type header, according to the OpenAI API reference.

// ...

    const response = await axios.post(
      "https://api.openai.com/v1/chat/completions",
      {
        model: "gpt-3.5-turbo",
        temperature: 1,
        messages: prompt,
      },
      {
        headers: {
          Authorization: `Bearer ${apiKey}`,
          "Content-Type": "application/json", // ✅
        },
      }
    );
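
Per the linked reference below, the request body also needs messages to be a JSON array of role/content objects rather than a single string. A minimal sketch of the expected shape, reusing the system/user messages from the question (values are illustrative only):

// Sketch: the shape the chat completions payload is expected to have.
// "messages" must be an array of objects, not a stringified list.
const body = {
  model: "gpt-3.5-turbo",
  temperature: 1,
  messages: [
    { role: "system", content: "You are a helpful AI Assistant." },
    { role: "user", content: "hi" },
  ],
};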

Refs:

https://platform.openai.com/docs/api-reference/making-requests

xgqfrms
  • I have tried this. Error remains. Could this be the problem `data: \`{"model":"gpt-3.5-turbo","temperature":1,"messages":"[{role: system, content: You are a helpful AI Assistant.}, {role: user, content: hi}]"}\`` as there are no double quotes over the values of role and content? – Hammad Nasir Aug 17 '23 at 14:08
  • Yes, just follow the official API usage method. – xgqfrms Aug 20 '23 at 14:58

Use console.log(prompt) in your server.js file. It seems like you are sending the prompt object as a string from Flutter.
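
That matches the config dump in the logs, where messages arrives as one quoted string. Below is a minimal sketch of how the route in server.js could log and guard against that, assuming the same express/axios/apiKey setup as in the question, and assuming the Flutter side is changed to send the messages as real JSON (e.g. via jsonEncode) instead of the Dart list's toString():

// Hypothetical hardening of the existing "/" route (same express/axios/apiKey
// setup as in the question; this is a sketch, not a drop-in fix).
app.post("/", async (req, res) => {
  try {
    let prompt = req.body.prompt;
    console.log("prompt:", typeof prompt, prompt); // see what actually arrives

    // If the client sent a JSON-encoded string, parse it back into an array.
    if (typeof prompt === "string") {
      prompt = JSON.parse(prompt); // throws if the string is not valid JSON
    }
    if (!Array.isArray(prompt)) {
      return res.status(400).json({ error: "prompt must be an array of messages" });
    }

    const response = await axios.post(
      "https://api.openai.com/v1/chat/completions",
      { model: "gpt-3.5-turbo", temperature: 1, messages: prompt },
      { headers: { Authorization: `Bearer ${apiKey}` } }
    );
    res.json(response.data);
  } catch (error) {
    console.error("Error:", error.message);
    // Always answer the client: the original catch only logs, so the request
    // hangs until Heroku's 30 s limit and produces the H12 timeout in the logs.
    res.status(error.response?.status || 500).json({ error: error.message });
  }
});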

Farhan Salam

In your Flutter function sendPrompt you are calling http.post with the second parameter (object) without surrounding curly brackets:

final response = await http.post(
    Uri.parse(url),
    { headers: <String, String>{
      'Content-Type': 'application/json',
    },
    body: '{"prompt": "$prompt"}' }
)
Terje Rosenlund
  • @Hammad Nasir I have pointed out an obvious error in your request without you commenting or accepting the answer. Is this a way to avoid handing out the promised bounty? – Terje Rosenlund Aug 25 '23 at 08:39