update(llama-api)

Add llama3 support for all calls, replacing the OpenAI ChatGPT calls
iheuzio 2024-05-19 17:54:26 -04:00
parent a1a8c44824
commit f0a9044c42
5 changed files with 3508 additions and 101 deletions
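Note on the runtime this commit assumes: every rewritten call targets http://127.0.0.1:11434, the default port of a locally running Ollama server, so a llama3 model has to be pulled on the machine before the extension can answer anything. A minimal sketch (not part of the commit) of how the extension could confirm that assumption up front, using Ollama's /api/tags listing endpoint; the helper name is hypothetical:

const axios = require('axios');

// Sketch only: check that a local Ollama server is up and a llama3 model is pulled
// before the extension starts routing chat requests to it.
async function llama3Available() {
  try {
    // /api/tags lists the models available on the local Ollama instance.
    const { data } = await axios.get('http://127.0.0.1:11434/api/tags');
    return Array.isArray(data.models) && data.models.some(m => m.name.startsWith('llama3'));
  } catch (err) {
    return false; // server not reachable on the default port
  }
}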

package-lock.json (generated): 3,514 changed lines; diff suppressed because it is too large.

package.json

@@ -60,7 +60,7 @@
"title": "Clear Selected Files"
}
],
- "viewsContainers" : {
+ "viewsContainers": {
"activitybar": [
{
"id": "gpt-contextfiles-sidebar-view",
@@ -103,6 +103,7 @@
"test": "node ./test/runTest.js"
},
"devDependencies": {
+ "@jest/globals": "^29.7.0",
"@types/glob": "^8.1.0",
"@types/mocha": "^10.0.1",
"@types/node": "20.2.5",
@@ -110,11 +111,16 @@
"@vscode/test-electron": "^2.3.2",
"eslint": "^8.41.0",
"glob": "^8.1.0",
+ "jest": "^29.7.0",
"mocha": "^10.2.0",
+ "ts-jest": "^29.1.2",
"typescript": "^5.1.3"
},
"dependencies": {
- "openai": "^3.3.0"
+ "axios": "^1.7.0",
+ "chai": "^5.1.1",
+ "openai": "^3.3.0",
+ "vscode": "^1.1.37"
},
"repository": {
"type": "git",


@@ -1,15 +1,8 @@
const vscode = require('vscode');
- const { Configuration, OpenAIApi } = require("openai");
+ const axios = require('axios');
const FileDataProvider = require('./fileDataProvider');
const { getWebviewContent } = require('./webviewPanel');
- const configuration = new Configuration({
- apiKey: process.env.OPENAI_API_KEY,
- });
- const openai = new OpenAIApi(configuration);
const selectedFiles = FileDataProvider.selectedFiles;
const fileDataProvider = new FileDataProvider.FileDataProvider();
@@ -34,25 +27,24 @@ async function handleQuestionSubmission(panel, question, selectedUris) {
})
.join('\n\n');
- // Call OpenAI API with the question and file contents
+ // Call Llama3 API with the question and file contents
try {
- const chatCompletion = await openai.createChatCompletion({
- model: "gpt-3.5-turbo-16k",
+ const response = await axios.post('http://127.0.0.1:11434/api/chat', {
+ model: "llama3",
messages: [
{ role: "system", content: "Answer the coding questions, only provide the code and documentation, explaining the solution after providing the code. Put codeblocks inside ``` code ``` with file names above each snippet." },
{ role: "user", content: question + "\n" + fileContents},
],
});
- // Extract the answer from the OpenAI response
- const answer = chatCompletion.data.choices[0].message.content;
+ // Extract the answer from the Llama3 response
+ const answer = response.data.choices[0].message.content;
- // Update the webview content to display only the OpenAI response
+ // Update the webview content to display only the Llama3 response
panel.webview.html = getWebviewContent(answer, question);
} catch (error) {
- // Handle any errors from the OpenAI API
- console.error("Failed to get OpenAI response:", error);
- panel.webview.html = getWebviewContent(`Failed to get response from OpenAI API. Error: ${error.message}`, question);
+ // Handle any errors from the Llama3 API
+ console.error("Failed to get Llama3 response:", error);
+ panel.webview.html = getWebviewContent(`Failed to get response from Llama3 API. Error: ${error.message}`, question);
}
}
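One hedged observation on the hunk above: the new request goes to Ollama's native /api/chat endpoint, but the answer is still read from response.data.choices[0].message.content, which is the OpenAI response schema. As far as Ollama's API documentation goes, /api/chat streams newline-delimited JSON by default and, once streaming is disabled, returns the reply under message.content with no choices array. A sketch of the extraction under that assumption, reusing the names from the diff:

// Sketch, assuming Ollama's documented non-streaming /api/chat response shape.
const axios = require('axios');

async function getLlama3Answer(question, fileContents) {
  const response = await axios.post('http://127.0.0.1:11434/api/chat', {
    model: "llama3",
    stream: false, // disable Ollama's default newline-delimited streaming
    messages: [
      { role: "system", content: "Answer the coding questions, only provide the code and documentation." },
      { role: "user", content: question + "\n" + fileContents },
    ],
  });
  // The assistant reply lives on message.content rather than choices[0].message.content.
  return response.data.message.content;
}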


@ -1,23 +0,0 @@
const path = require('path');
const { runTests } = require('@vscode/test-electron');
async function main() {
try {
// The folder containing the Extension Manifest package.json
// Passed to `--extensionDevelopmentPath`
const extensionDevelopmentPath = path.resolve(__dirname, '../');
// The path to the extension test script
// Passed to --extensionTestsPath
const extensionTestsPath = path.resolve(__dirname, './suite/index');
// Download VS Code, unzip it and run the integration test
await runTests({ extensionDevelopmentPath, extensionTestsPath });
} catch (err) {
console.error('Failed to run tests', err);
process.exit(1);
}
}
main();

test/test.js (new file, 18 lines)

@@ -0,0 +1,18 @@
+ const axios = require('axios');
+ const { expect } = require('@jest/globals');
+ test('Llama3 Generate Test', () => {
+ return axios.post('http://localhost:11434/api/generate', {
+ model: "llama3",
+ prompt: "Why is the sky blue?"
+ })
+ .then(response => {
+ console.log(response.data); // Log the response data
+ expect(response.data).toBeTruthy();
+ // Add more assertions based on the expected structure of the response data
+ })
+ .catch(error => {
+ console.error(error);
+ });
+ }, 10000); // Increase timeout to 10 seconds
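A hedged note on this new test: the trailing .catch only logs, so a failed request still resolves the returned promise and Jest reports the test as passing, and the single assertion never inspects the generated text. A sketch of an async/await variant against the same endpoint; the stream: false flag is an assumption not present in the diff, and with it /api/generate should return one JSON object whose response field holds the generated text:

const axios = require('axios');
const { expect, test } = require('@jest/globals');

// Sketch: let request failures reject so Jest can report them, and assert on the text itself.
test('Llama3 Generate Test (non-streaming)', async () => {
  const { data } = await axios.post('http://localhost:11434/api/generate', {
    model: "llama3",
    prompt: "Why is the sky blue?",
    stream: false, // single JSON object instead of a token stream (assumption)
  });
  expect(typeof data.response).toBe('string'); // generated text is on the response field
  expect(data.response.length).toBeGreaterThan(0);
}, 30000); // first generation can be slow while the model loads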