
Commit 9520f03

Add GitHub Copilot integration
1 parent a82eecc commit 9520f03

6 files changed: +285 -0 lines changed
packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/copilot.py

Lines changed: 162 additions & 0 deletions
@@ -0,0 +1,162 @@
from subprocess import PIPE, Popen
from typing import Optional

import pylspclient
from jupyter_ai import BaseProvider, TextField
from jupyter_ai import __version__ as jupyter_ai_version
from jupyter_ai_magics.models.completion import (
    InlineCompletionList,
    InlineCompletionReply,
    InlineCompletionRequest,
)
from jupyterlab import __version__ as jupyterlab_version

INIT_PARAMS = {
    "capabilities": {"workspace": {"workspaceFolders": False}},
    "initializationOptions": {
        "editorInfo": {"name": "JupyterLab", "version": jupyterlab_version},
        "editorPluginInfo": {"name": "jupyter-ai", "version": jupyter_ai_version},
    },
}


def calc_position_lineno_and_char(prefix, suffix):
    """
    Calculate the line number and character position within a text based on a given prefix and suffix text.
    GitHub Copilot LSP requires those positions for completion requests.
    https://www.npmjs.com/package/@github/copilot-language-server#panel-completions
    """

    full_text = prefix + suffix

    lineno = full_text.count("\n", 0, len(prefix))
    prefix_text = "\n".join(full_text.split("\n")[:lineno])
    char_pos = len(prefix) - len(prefix_text) - 1

    return lineno, char_pos


class GitHubCopilotLLM:
    process: Optional[Popen] = None

    def __init__(self, lsp_bin_path: str):
        self.lsp_bin_path = lsp_bin_path
        self.ensure_lsp_server_initialized()

    def _initialize(self):
        self.lsp_endpoint.call_method(method_name="initialize", **INIT_PARAMS)
        self.lsp_endpoint.send_notification(method_name="initialized")

    def _signin(self):
        self.ensure_lsp_server_initialized()
        res = self.lsp_endpoint.call_method(
            method_name="signIn",
        )
        return res

    def _signout(self):
        self.ensure_lsp_server_initialized()
        res = self.lsp_endpoint.call_method(
            method_name="signOut",
        )
        return res

    def _completion(self, code, pos_line, pos_char):
        self.ensure_lsp_server_initialized()
        self.lsp_endpoint.send_notification(
            method_name="textDocument/didOpen",
            **{
                "textDocument": {
                    "uri": "file:///dummy",
                    "version": 0,
                    "languageId": "python",
                    "text": code,
                }
            },
        )

        res = self.lsp_endpoint.call_method(
            method_name="textDocument/copilotPanelCompletion",
            **{
                "textDocument": {
                    "uri": "file:///dummy",
                    "version": 0,
                },
                "position": {
                    "line": pos_line,
                    "character": pos_char,
                },
            },
        )
        return res

    def _start_lsp_server(self):
        if not self.is_lsp_server_running:
            self.process = Popen(
                [self.lsp_bin_path, "--stdio"], stdin=PIPE, stdout=PIPE, stderr=PIPE
            )
            self.json_rpc_endpoint = pylspclient.JsonRpcEndpoint(
                self.process.stdin, self.process.stdout
            )
            self.lsp_endpoint = pylspclient.LspEndpoint(
                self.json_rpc_endpoint, timeout=15
            )
            self.lsp_endpoint.start()

    def _stop_lsp_server(self):
        self.lsp_endpoint.stop()
        self.process.kill()

    def ensure_lsp_server_initialized(self):
        if not self.is_lsp_server_running:
            self._start_lsp_server()
            self._initialize()

    @property
    def is_lsp_server_running(self):
        return self.process is not None and self.process.poll() is None

    @property
    def _llm_type(self) -> str:
        return "github-copilot"


class GitHubCopilotProvider(BaseProvider):
    id = "github-copilot"
    name = "GitHub Copilot"
    models = ["*"]
    model_id_key = "model"
    pypi_package_deps = ["pylspclient"]
    help = (
        "Make sure you've installed copilot-language-server "
        "([https://www.npmjs.com/package/@github/copilot-language-server](https://www.npmjs.com/package/@github/copilot-language-server)). "
        "Set its absolute path in the `lsp_bin_path` field."
    )
    fields = [
        TextField(
            key="lsp_bin_path", label="Copilot LSP binary absolute path", format="text"
        ),
    ]

    def __init__(
        self,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self._llm = GitHubCopilotLLM(lsp_bin_path=self.lsp_bin_path)

    async def generate_inline_completions(self, request: InlineCompletionRequest):
        self._llm.ensure_lsp_server_initialized()

        full_text = request.prefix + request.suffix
        lineno, char = calc_position_lineno_and_char(request.prefix, request.suffix)
        suggestions = self._llm._completion(full_text, lineno, char)
        completions = [
            {
                "insertText": item["insertText"][char:],
            }
            for item in suggestions["items"]
        ]
        return InlineCompletionReply(
            list=InlineCompletionList(items=completions),
            reply_to=request.number,
        )
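As a quick illustration of the position math above (a standalone sketch, not part of the commit, using a copy of the helper): the cursor sits at the prefix/suffix boundary, and the Copilot LSP expects that point as a zero-based line and character within the full document.

# Standalone sketch: a copy of the helper above, plus one worked example.
def calc_position_lineno_and_char(prefix, suffix):
    full_text = prefix + suffix
    lineno = full_text.count("\n", 0, len(prefix))           # newlines before the cursor
    prefix_text = "\n".join(full_text.split("\n")[:lineno])   # text of the preceding lines
    char_pos = len(prefix) - len(prefix_text) - 1             # "-1" accounts for the newline ending the previous line
    return lineno, char_pos

prefix = "def add(a, b):\n    return "   # everything before the cursor
suffix = "\n"                            # everything after the cursor
print(calc_position_lineno_and_char(prefix, suffix))  # (1, 11): line 1, column 11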
Lines changed: 67 additions & 0 deletions
@@ -0,0 +1,67 @@
import unittest
from subprocess import PIPE
from unittest.mock import MagicMock, patch

from jupyter_ai_magics.models.completion import InlineCompletionRequest
from jupyter_ai_magics.partner_providers.copilot import (
    GitHubCopilotLLM,
    GitHubCopilotProvider,
    calc_position_lineno_and_char,
)


class TestGitHubCopilotLLM(unittest.TestCase):
    @patch("jupyter_ai_magics.partner_providers.copilot.Popen")
    @patch("jupyter_ai_magics.partner_providers.copilot.pylspclient.JsonRpcEndpoint")
    @patch("jupyter_ai_magics.partner_providers.copilot.pylspclient.LspEndpoint")
    def test_initialize_lsp_server(
        self, mock_lsp_endpoint, mock_json_rpc_endpoint, mock_popen
    ):
        mock_process = MagicMock()
        mock_popen.return_value = mock_process
        mock_endpoint = MagicMock()
        mock_lsp_endpoint.return_value = mock_endpoint

        llm = GitHubCopilotLLM(lsp_bin_path="dummy_path")

        mock_popen.assert_called_once_with(
            ["dummy_path", "--stdio"], stdin=PIPE, stdout=PIPE, stderr=PIPE
        )
        mock_json_rpc_endpoint.assert_called_once_with(
            mock_process.stdin, mock_process.stdout
        )
        mock_lsp_endpoint.assert_called_once_with(
            mock_json_rpc_endpoint.return_value, timeout=15
        )
        mock_endpoint.start.assert_called_once()

    def test_calc_position_lineno_and_char(self):
        prefix = "line1\nline2\n"
        suffix = "line3\nline4"
        lineno, char_pos = calc_position_lineno_and_char(prefix, suffix)

        self.assertEqual(lineno, 2)
        self.assertEqual(char_pos, 0)


class TestGitHubCopilotProvider(unittest.TestCase):
    @patch("jupyter_ai_magics.partner_providers.copilot.GitHubCopilotLLM")
    def test_generate_inline_completions(self, mock_llm_class):
        mock_llm = MagicMock()
        mock_llm_class.return_value = mock_llm
        mock_llm._completion.return_value = {
            "items": [{"insertText": "completion1"}, {"insertText": "completion2"}]
        }

        provider = GitHubCopilotProvider(
            lsp_bin_path="dummy_path", model_id="github-copilot"
        )
        result = provider._llm._completion("print()", 0, 6)

        self.assertEqual(len(result["items"]), 2)
        self.assertEqual(result["items"][0]["insertText"], "completion1")
        self.assertEqual(result["items"][1]["insertText"], "completion2")


if __name__ == "__main__":
    unittest.main()
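For completeness, a hypothetical sketch (not part of the commit) of the async inline-completion path, which the tests above don't exercise. It mocks GitHubCopilotLLM the same way the provider test does, and uses a plain stand-in object for the request, since the provider only reads its prefix, suffix, and number attributes:

import asyncio
from types import SimpleNamespace
from unittest.mock import MagicMock, patch

from jupyter_ai_magics.partner_providers.copilot import GitHubCopilotProvider

with patch("jupyter_ai_magics.partner_providers.copilot.GitHubCopilotLLM") as mock_cls:
    mock_llm = MagicMock()
    mock_cls.return_value = mock_llm
    # The provider slices the first `char` characters off each insertText,
    # so this mocked item yields "a + b" after the cursor position (1, 11).
    mock_llm._completion.return_value = {"items": [{"insertText": "    return a + b"}]}

    provider = GitHubCopilotProvider(lsp_bin_path="dummy_path", model_id="github-copilot")
    # Stand-in for InlineCompletionRequest with only the attributes the provider reads.
    request = SimpleNamespace(prefix="def add(a, b):\n    return ", suffix="\n", number=1)
    reply = asyncio.run(provider.generate_inline_completions(request))
    # reply.list.items holds one completion whose insertText is "a + b"
    print(reply.reply_to)  # 1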

packages/jupyter-ai-magics/pyproject.toml

Lines changed: 1 addition & 0 deletions
@@ -84,6 +84,7 @@ together-ai = "jupyter_ai_magics:TogetherAIProvider"
 gemini = "jupyter_ai_magics.partner_providers.gemini:GeminiProvider"
 mistralai = "jupyter_ai_magics.partner_providers.mistralai:MistralAIProvider"
 openrouter = "jupyter_ai_magics.partner_providers.openrouter:OpenRouterProvider"
+github-copilot = "jupyter_ai_magics.partner_providers.copilot:GitHubCopilotProvider"

 [project.entry-points."jupyter_ai.embeddings_model_providers"]
 azure = "jupyter_ai_magics.partner_providers.openai:AzureOpenAIEmbeddingsProvider"
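Once installed, the new provider is discovered through this entry-point table. A minimal lookup sketch (assuming the model-provider group is named `jupyter_ai.model_providers`; only the embeddings group name is visible in this hunk):

from importlib.metadata import entry_points  # Python 3.10+ selection API

# Hypothetical: find the provider registered under the assumed group name.
for ep in entry_points(group="jupyter_ai.model_providers"):
    if ep.name == "github-copilot":
        provider_cls = ep.load()  # -> GitHubCopilotProvider
        print(ep.name, provider_cls)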

packages/jupyter-ai/jupyter_ai/chat_handlers/__init__.py

Lines changed: 1 addition & 0 deletions
@@ -1,5 +1,6 @@
 from .ask import AskChatHandler
 from .base import BaseChatHandler, SlashCommandRoutingType
+from .copilot import GitHubCopilotChatHandler
 from .default import DefaultChatHandler
 from .generate import GenerateChatHandler
 from .help import HelpChatHandler
packages/jupyter-ai/jupyter_ai/chat_handlers/copilot.py

Lines changed: 53 additions & 0 deletions
@@ -0,0 +1,53 @@
from jupyterlab_chat.models import Message

from .base import BaseChatHandler, SlashCommandRoutingType

MESSAGE_TO_ASK_SIGNIN = """
<div>Your Device Code is <b><code>{userCode}</code></b></div>
<div>Please go to <a href="{verificationUri}">{verificationUri}</a> and authorize using the above code.</div>
"""

MESSAGE_ALREADY_SIGNIN = """<div>You've already signed in as <b>{user}</b></div>"""


class GitHubCopilotChatHandler(BaseChatHandler):

    id = "github"
    name = "GitHub"
    help = "GitHub"
    routing_type = SlashCommandRoutingType(slash_id="github")

    uses_llm = True

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    async def process_message(self, message: Message):
        splitted_message = message.body.split(" ")
        # Guard against a bare "/github" with no subcommand; None falls through
        # to the "Unknown subcommand" reply below.
        sub_command = splitted_message[1] if len(splitted_message) > 1 else None

        lm_provider_class = self.config_manager.lm_provider
        lm_provider_params = self.config_manager.lm_provider_params
        lm_provider = lm_provider_class(**lm_provider_params)
        copilot_llm = lm_provider._llm
        copilot_llm.ensure_lsp_server_initialized()
        SUBCOMMANDS = ["signin", "signout"]

        if sub_command not in SUBCOMMANDS:
            self.reply(
                f"""
                <div>Unknown subcommand. Available subcommands: {SUBCOMMANDS}</div>
                """
            )
        else:
            if sub_command == "signin":
                res = copilot_llm._signin()
                if res.get("status") == "AlreadySignedIn":
                    self.reply(MESSAGE_ALREADY_SIGNIN.format(**res))
                else:
                    self.reply(MESSAGE_TO_ASK_SIGNIN.format(**res))
            elif sub_command == "signout":
                res = copilot_llm._signout()
                if res.get("status") == "NotSignedIn":
                    self.reply("You have signed out.")

packages/jupyter-ai/pyproject.toml

Lines changed: 1 addition & 0 deletions
@@ -51,6 +51,7 @@ ask = "jupyter_ai.chat_handlers.ask:AskChatHandler"
 generate = "jupyter_ai.chat_handlers.generate:GenerateChatHandler"
 learn = "jupyter_ai.chat_handlers.learn:LearnChatHandler"
 help = "jupyter_ai.chat_handlers.help:HelpChatHandler"
+github = "jupyter_ai.chat_handlers.copilot:GitHubCopilotChatHandler"

 [project.optional-dependencies]
 test = [
