helix-gpt: Undefined is not an object

I’ve set up helix-gpt (properly, I think; see below), but when editing a Python file I get:

2024-01-26T14:08:07.382 helix_lsp::transport [ERROR] gpt err <- "TypeError: undefined is not an object (evaluating 'this.contents.split')\n"
2024-01-26T14:08:07.382 helix_lsp::transport [ERROR] gpt err <- "      at positionalUpdate (/$bunfs/root/helix-gpt:238:19)\n"
2024-01-26T14:08:07.382 helix_lsp::transport [ERROR] gpt err <- "      at /$bunfs/root/helix-gpt:232:9\n"
2024-01-26T14:08:07.382 helix_lsp::transport [ERROR] gpt err <- "      at forEach (:1:21)\n"
2024-01-26T14:08:07.382 helix_lsp::transport [ERROR] gpt err <- "      at /$bunfs/root/helix-gpt:231:7\n"
2024-01-26T14:08:07.382 helix_lsp::transport [ERROR] gpt err <- "      at /$bunfs/root/helix-gpt:230:41\n"
2024-01-26T14:08:07.382 helix_lsp::transport [ERROR] gpt err <- "      at /$bunfs/root/helix-gpt:260:9\n"
2024-01-26T14:08:07.382 helix_lsp::transport [ERROR] gpt err <- "      at /$bunfs/root/helix-gpt:258:39\n"
2024-01-26T14:08:07.382 helix_lsp::transport [ERROR] gpt err <- "      at emit (node:events:154:95)\n"
2024-01-26T14:08:07.382 helix_lsp::transport [ERROR] gpt err <- "      at /$bunfs/root/helix-gpt:319:7\n"
2024-01-26T14:08:07.382 helix_lsp::transport [ERROR] gpt err <- "      at receiveLine (/$bunfs/root/helix-gpt:313:23)\n"
2024-01-26T14:08:07.666 helix_lsp::transport [ERROR] gpt err <- "233 |       });\n"
2024-01-26T14:08:07.666 helix_lsp::transport [ERROR] gpt err <- "234 |       ctx.contentVersion = request.params.textDocument.version;\n"
2024-01-26T14:08:07.666 helix_lsp::transport [ERROR] gpt err <- "235 |     });\n"
2024-01-26T14:08:07.666 helix_lsp::transport [ERROR] gpt err <- "236 |   }\n"
2024-01-26T14:08:07.666 helix_lsp::transport [ERROR] gpt err <- "237 |   positionalUpdate(text, range) {\n"
2024-01-26T14:08:07.666 helix_lsp::transport [ERROR] gpt err <- "238 |     const lines = this.contents.split(\"\\n\");\n"
2024-01-26T14:08:07.666 helix_lsp::transport [ERROR] gpt err <- "                        ^\n"
2024-01-26T14:08:07.666 helix_lsp::transport [ERROR] gpt err <- "TypeError: undefined is not an object (evaluating 'this.contents.split')\n"
2024-01-26T14:08:07.666 helix_lsp::transport [ERROR] gpt err <- "      at positionalUpdate (/$bunfs/root/helix-gpt:238:19)\n"
2024-01-26T14:08:07.666 helix_lsp::transport [ERROR] gpt err <- "      at /$bunfs/root/helix-gpt:232:9\n"
2024-01-26T14:08:07.666 helix_lsp::transport [ERROR] gpt err <- "      at forEach (:1:21)\n"
2024-01-26T14:08:07.666 helix_lsp::transport [ERROR] gpt err <- "      at /$bunfs/root/helix-gpt:231:7\n"
2024-01-26T14:08:07.666 helix_lsp::transport [ERROR] gpt err <- "      at /$bunfs/root/helix-gpt:230:41\n"
2024-01-26T14:08:07.666 helix_lsp::transport [ERROR] gpt err <- "      at /$bunfs/root/helix-gpt:260:9\n"
2024-01-26T14:08:07.667 helix_lsp::transport [ERROR] gpt err <- "      at /$bunfs/root/helix-gpt:258:39\n"
2024-01-26T14:08:07.667 helix_lsp::transport [ERROR] gpt err <- "      at emit (node:events:154:95)\n"
2024-01-26T14:08:07.667 helix_lsp::transport [ERROR] gpt err <- "      at /$bunfs/root/helix-gpt:319:7\n"
2024-01-26T14:08:07.667 helix_lsp::transport [ERROR] gpt err <- "      at receiveLine (/$bunfs/root/helix-gpt:313:23)\n"
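From the code excerpt in the trace, positionalUpdate assumes this.contents was already populated by an earlier didOpen/didChange. If I’m reading it right, the method is roughly the following (my reconstruction, with guessed names and a hypothetical guard; not helix-gpt’s actual code):

interface Position { line: number; character: number }
interface Range { start: Position; end: Position }

class Ctx {
  contents?: string;

  positionalUpdate(text: string, range: Range): void {
    // Hypothetical guard: helix-gpt has no such check, which is why
    // line 238 (`this.contents.split("\n")`) throws when contents is unset.
    if (this.contents === undefined) return;

    // Splice the incremental change into the buffered document text.
    const lines = this.contents.split("\n");
    const before = lines[range.start.line].slice(0, range.start.character);
    const after = lines[range.end.line].slice(range.end.character);
    lines.splice(
      range.start.line,
      range.end.line - range.start.line + 1,
      before + text + after,
    );
    this.contents = lines.join("\n");
  }
}

So the real question seems to be why contents is still undefined by the time the first completion request arrives.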

My languages.toml contains:

[language-server.gpt]
command = "/opt/homebrew/bin/helix-gpt"
config = {}
args = ["--logFile", "/tmp/helix-gpt.log", "--handler", "copilot", "--copilotApiKey", "xxxx"]

[[language]]
name = "python"
scope = "source.python"
language-servers = [ "gpt" ]

The binary /opt/homebrew/bin/helix-gpt exists and is executable (I ran /opt/homebrew/bin/helix-gpt --authCopilot to obtain the Copilot key).

An excerpt from the helix-gpt log (/tmp/helix-gpt.log) shows:

APP 2024-01-26T13:08:04.264Z --> sent request | {"jsonrpc":"2.0","method":"initialize","id":0,"result":{"capabilities":{"completionProvider":{"resolveProvider":false,"triggerCharacters":["{","(",")","=",">"," ",",",":",".","<","/"]},"textDocumentSync":{"change":2}}}}

APP 2024-01-26T13:08:04.265Z --> failed to parse line: | JSON Parse error: Unable to parse JSON string | Content-Length: 52

{"jsonrpc":"2.0","method":"initialized","params":{}}Content-Length: 3888

{"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{"languageId":"python","text":"import base64\nimport json\nimport os\nfrom pathlib import Path\nfrom typing import Any, Dict\n\nfrom utilities.parser import parse_query\nfrom utilities.salesforce import Salesforce\nfrom google.cloud import storage\nfrom google.cloud import pubsub_v1\nfrom google.oauth2 import service_account\nimport logging\n\nfrom google.cloud.bigquery import Client\n\n\nqueries = {p.stem: p.open().read() for p in Path(\"queries\").glob(\"*.sql\")}\n\ndef get_salesforce_client() -> Salesforce:\n    blob: str = (\n        storage.Client(project=os.environ[\"GOOGLE_CLOUD_PROJECT\"])\n        .get_bucket(os.environ[\"STORAGE_BUCKET\"])\n        .get_blob(os.environ[\"APP\"])\n        .download_as_string()\n    )\n\n    parsed = json.loads(blob)\n\n    conn_login = parsed[\"conn_login\"]\n    conn_password = parsed[\"conn_password\"]\n    conn_security_token = parsed[\"conn_security_token\"]\n    conn_host = parsed[\"conn_host\"]\n\n    return Salesforce(\n        conn_login=conn_login,\n        conn_password=conn_password,\n        conn_security_token=conn_security_token,\n        conn_host=conn_host,\n    )\n\n\ndef get_pandas_gbq_credentials():\n    blob: str = (\n        storage.Client(project=os.environ[\"GOOGLE_CLOUD_PROJECT\"])\n        .get_bucket(os.environ[\"STORAGE_BUCKET\"])\n        .get_blob(\"pandas.json\")\n        .download_as_string()\n    )\n\n    account_info = json.loads(blob)\n\n    credentials = service_account.Credentials.from_service_account_info(account_info)\n\n    return credentials\n\n\ndef schedule_events(event, context):\n    \"\"\"\n    Publish the tables that need to be fetched from salesforce\n\n    :param dict event: The dictionary with data specific to this type of\n         event. The `data` field contains the PubsubMessage message. The\n         `attributes` field will contain custom attributes if there are any.\n    :param google.cloud.functions.Context context: The Cloud Functions event\n         metadata. The `event_id` field contains the Pub/Sub message ID. The\n         `timestamp` field contains the publish time.\n    \"\"\"\n    topic = os.environ[\"APP\"]\n    project_id = os.environ[\"GOOGLE_CLOUD_PROJECT\"]\n    publisher = pubsub_v1.PublisherClient()\n    topic_name = f\"projects/{project_id}/topics/{topic}\"\n\n    for table in queries.keys():\n        publisher.publish(topic_name, bytes(table, \"utf-8\"))\n\n\ndef import_salesforce(event: Dict[str, Any], context):\n    \"\"\"\n    Import the tables from salesforce to bigquery\n\n    :param event: The dictionary with data specific to this type of\n         event. The `data` field contains the PubsubMessage message. The\n         `attributes` field will contain custom attributes if there are any.\n    :param context: The Cloud Functions event\n         metadata. The `event_id` field contains the Pub/Sub message ID. 
The\n         `timestamp` field contains the publish time.\n    \"\"\"\n\n    sf = get_salesforce_client()\n\n    if \"data\" in event:\n        table_name = base64.b64decode(event[\"data\"]).decode(\"utf-8\")\n        query = queries.get(table_name)\n\n        if query:\n            parsed = parse_query(\n                query, strip_aliases=True, strip_escapes=False, strip_comments=True\n            )\n            table: str = parsed.table\n            logging.info(\"Importing %s\" % table)\n            df = sf.query_df(soql=query)\n            Salesforce.insert_in_bq(\n                df,\n                destination_table=f\"salesforce_xebia.{table}\",\n                project_id=os.environ[\"GOOGLE_CLOUD_PROJECT\"],\n                credentials=get_pandas_gbq_credentials(),\n            )\n            logging.info(\"Finished %s\" % table)\n","uri":"file:///Users/gio/code/salesforce-to-bigquery/main.py","version":0}}}Content-Length: 86

{"jsonrpc":"2.0","method":"workspace/didChangeConfiguration","params":{"settings":{}}}

APP 2024-01-26T13:08:07.750Z --> received request: | {"jsonrpc":"2.0","method":"textDocument/completion","params":{"position":{"character":2,"line":15},"textDocument":{"uri":"file:///Users/gio/code/salesforce-to-bigquery/main.py"}},"id":1}

APP 2024-01-26T13:08:07.877Z --> received request: | {"jsonrpc":"2.0","method":"textDocument/completion","params":{"position":{"character":3,"line":15},"textDocument":{"uri":"file:///Users/gio/code/salesforce-to-bigquery/main.py"}},"id":2}
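One thing that stands out to me: the failed to parse line entry at 13:08:04.265 shows the initialized notification and the Content-Length: 3888 header of the next message glued together, as if the server were splitting stdin on newlines instead of buffering according to the base protocol’s Content-Length framing. For reference, the framing has to be handled roughly like this (my sketch of the LSP base protocol, not helix-gpt’s code):

let pending = Buffer.alloc(0);

// Each LSP message is "Content-Length: N\r\n\r\n" followed by exactly N bytes
// of JSON, and several messages can arrive in a single stdin chunk.
function onData(chunk: Buffer, handle: (msg: unknown) => void): void {
  pending = Buffer.concat([pending, chunk]);
  for (;;) {
    const headerEnd = pending.indexOf("\r\n\r\n");
    if (headerEnd === -1) return; // header not complete yet
    const header = pending.subarray(0, headerEnd).toString("ascii");
    const match = /Content-Length: *(\d+)/i.exec(header);
    if (!match) throw new Error("missing Content-Length header");
    const bodyStart = headerEnd + 4;
    const bodyEnd = bodyStart + Number(match[1]);
    if (pending.length < bodyEnd) return; // body not complete yet
    handle(JSON.parse(pending.subarray(bodyStart, bodyEnd).toString("utf8")));
    pending = pending.subarray(bodyEnd);
  }
}

If the didOpen body is lost in that parse failure, contents never gets initialized, which would explain the crash above. I may be misreading the log, though.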

What could be wrong?

About this issue

  • State: closed
  • Created 5 months ago
  • Comments: 17 (8 by maintainers)

Most upvoted comments

@lemontheme Good shout, I’ve removed that log in 0.12 now.

The 400 requests should be resolved as well; it wasn’t passing the correct filepath.

Okay, so this is weird: just now I did get a single completion, with a green dot and a ‘fetching completions’ notification in the top-right corner. After that, I wasn’t able to get any other completions. No idea why it was a one-time thing.

Could you run Helix in verbose mode if you aren’t already (hx -v file.ts) and post the output of the two log files again, please? Mainly the .cache one (~/.cache/helix/helix.log), but captured from before the server starts, just so I can see what’s happening when the didOpen event fires.